--- a/xpcom/base/ErrorList.py
+++ b/xpcom/base/ErrorList.py
@@ -1,27 +1,29 @@
#!/usr/bin/env python
from collections import OrderedDict
+
class Mod:
"""
A nserror module. When used with a `with` statement, binds the itself to
Mod.active.
"""
active = None
def __init__(self, num):
self.num = num
def __enter__(self):
Mod.active = self
def __exit__(self, _type, _value, _traceback):
Mod.active = None
+
modules = OrderedDict()
# To add error code to your module, you need to do the following:
#
# 1) Add a module offset code. Add yours to the bottom of the list
# right below this comment, adding 1.
#
# 2) In your module, define a header file which uses one of the
@@ -90,25 +92,29 @@ modules["ERRORRESULT"] = Mod(43)
# the generic base.
modules["GENERAL"] = Mod(51)
MODULE_BASE_OFFSET = 0x45
NS_ERROR_SEVERITY_SUCCESS = 0
NS_ERROR_SEVERITY_ERROR = 1
+
def SUCCESS_OR_FAILURE(sev, module, code):
return (sev << 31) | ((module + MODULE_BASE_OFFSET) << 16) | code
+
def FAILURE(code):
return SUCCESS_OR_FAILURE(NS_ERROR_SEVERITY_ERROR, Mod.active.num, code)
+
def SUCCESS(code):
return SUCCESS_OR_FAILURE(NS_ERROR_SEVERITY_SUCCESS, Mod.active.num, code)
+
# Errors is an ordered dictionary, so that we can recover the order in which
# they were defined. This is important for determining which name is the
# canonical name for an error code.
errors = OrderedDict()
# Standard "it worked" return value
errors["NS_OK"] = 0
@@ -191,17 +197,16 @@ with modules["BASE"]:
errors["NS_BASE_STREAM_ILLEGAL_ARGS"] = FAILURE(4)
# For unichar streams
errors["NS_BASE_STREAM_NO_CONVERTER"] = FAILURE(5)
# For unichar streams
errors["NS_BASE_STREAM_BAD_CONVERSION"] = FAILURE(6)
errors["NS_BASE_STREAM_WOULD_BLOCK"] = FAILURE(7)
-
# =======================================================================
# 3: NS_ERROR_MODULE_GFX
# =======================================================================
with modules["GFX"]:
# no printer available (e.g. cannot find _any_ printer)
errors["NS_ERROR_GFX_PRINTER_NO_PRINTER_AVAILABLE"] = FAILURE(1)
# _specified_ (by name) printer not found
errors["NS_ERROR_GFX_PRINTER_NAME_NOT_FOUND"] = FAILURE(2)
@@ -215,17 +220,16 @@ with modules["GFX"]:
errors["NS_ERROR_GFX_PRINTER_STARTPAGE"] = FAILURE(6)
# The document is still being loaded
errors["NS_ERROR_GFX_PRINTER_DOC_IS_BUSY"] = FAILURE(7)
# Font cmap is strangely structured - avoid this font!
errors["NS_ERROR_GFX_CMAP_MALFORMED"] = FAILURE(51)
-
# =======================================================================
# 4: NS_ERROR_MODULE_WIDGET
# =======================================================================
with modules["WIDGET"]:
# Used by:
# - nsIWidget::NotifyIME()
# - nsIWidget::OnWindowedPluginKeyEvent()
# Returned when the notification or the event is handled and it's consumed
@@ -233,17 +237,16 @@ with modules["WIDGET"]:
errors["NS_SUCCESS_EVENT_CONSUMED"] = SUCCESS(1)
# Used by:
# - nsIWidget::OnWindowedPluginKeyEvent()
# Returned when the event is handled correctly but the result will be
# notified asynchronously.
errors["NS_SUCCESS_EVENT_HANDLED_ASYNCHRONOUSLY"] = SUCCESS(2)
-
# =======================================================================
# 6: NS_ERROR_MODULE_NETWORK
# =======================================================================
with modules["NETWORK"]:
# General async request error codes:
#
# These error codes are commonly passed through callback methods to indicate
# the status of some requested async request.
@@ -352,17 +355,16 @@ with modules["NETWORK"]:
# document from a website that is not in its white-list.
errors["NS_ERROR_REMOTE_XUL"] = FAILURE(75)
# The request resulted in an error page being displayed.
errors["NS_ERROR_LOAD_SHOWED_ERRORPAGE"] = FAILURE(77)
# The request occurred in docshell that lacks a treeowner, so it is
# probably in the process of being torn down.
errors["NS_ERROR_DOCSHELL_DYING"] = FAILURE(78)
-
# FTP specific error codes:
errors["NS_ERROR_FTP_LOGIN"] = FAILURE(21)
errors["NS_ERROR_FTP_CWD"] = FAILURE(22)
errors["NS_ERROR_FTP_PASV"] = FAILURE(23)
errors["NS_ERROR_FTP_PWD"] = FAILURE(24)
errors["NS_ERROR_FTP_LIST"] = FAILURE(28)
@@ -374,17 +376,16 @@ with modules["NETWORK"]:
# A low or medium priority DNS lookup failed because the pending queue was
# already full. High priorty (the default) always makes room
errors["NS_ERROR_DNS_LOOKUP_QUEUE_FULL"] = FAILURE(33)
# The lookup of a proxy hostname failed. If a channel is configured to
# speak to a proxy server, then it will generate this error if the proxy
# hostname cannot be resolved.
errors["NS_ERROR_UNKNOWN_PROXY_HOST"] = FAILURE(42)
-
# Socket specific error codes:
# The specified socket type does not exist.
errors["NS_ERROR_UNKNOWN_SOCKET_TYPE"] = FAILURE(51)
# The specified socket type could not be created.
errors["NS_ERROR_SOCKET_CREATE_FAILED"] = FAILURE(52)
# The operating system doesn't support the given type of address.
errors["NS_ERROR_SOCKET_ADDRESS_NOT_SUPPORTED"] = FAILURE(53)
@@ -407,17 +408,16 @@ with modules["NETWORK"]:
# Effective TLD Service specific error codes:
# The requested number of domain levels exceeds those present in the host
# string.
errors["NS_ERROR_INSUFFICIENT_DOMAIN_LEVELS"] = FAILURE(80)
# The host string is an IP address.
errors["NS_ERROR_HOST_IS_IP_ADDRESS"] = FAILURE(81)
-
# StreamLoader specific result codes:
# Result code returned by nsIStreamLoaderObserver to indicate that the
# observer is taking over responsibility for the data buffer, and the loader
# should NOT free it.
errors["NS_SUCCESS_ADOPTED_DATA"] = SUCCESS(90)
# FTP
@@ -446,42 +446,39 @@ with modules["NETWORK"]:
errors["NS_NET_STATUS_WAITING_FOR"] = FAILURE(10)
errors["NS_NET_STATUS_RECEIVING_FROM"] = FAILURE(6)
# nsIInterceptedChannel
# Generic error for non-specific failures during service worker interception
errors["NS_ERROR_INTERCEPTION_FAILED"] = FAILURE(100)
-
# =======================================================================
# 7: NS_ERROR_MODULE_PLUGINS
# =======================================================================
with modules["PLUGINS"]:
errors["NS_ERROR_PLUGINS_PLUGINSNOTCHANGED"] = FAILURE(1000)
errors["NS_ERROR_PLUGIN_DISABLED"] = FAILURE(1001)
errors["NS_ERROR_PLUGIN_BLOCKLISTED"] = FAILURE(1002)
errors["NS_ERROR_PLUGIN_TIME_RANGE_NOT_SUPPORTED"] = FAILURE(1003)
errors["NS_ERROR_PLUGIN_CLICKTOPLAY"] = FAILURE(1004)
-
# =======================================================================
# 8: NS_ERROR_MODULE_LAYOUT
# =======================================================================
with modules["LAYOUT"]:
# Return code for nsITableLayout
errors["NS_TABLELAYOUT_CELL_NOT_FOUND"] = SUCCESS(0)
# Return code for SheetLoadData::VerifySheetReadyToParse
errors["NS_OK_PARSE_SHEET"] = SUCCESS(1)
# Return code for nsFrame::GetNextPrevLineFromeBlockFrame
errors["NS_POSITION_BEFORE_TABLE"] = SUCCESS(3)
-
# =======================================================================
# 9: NS_ERROR_MODULE_HTMLPARSER
# =======================================================================
with modules["HTMLPARSER"]:
errors["NS_ERROR_HTMLPARSER_CONTINUE"] = errors["NS_OK"]
errors["NS_ERROR_HTMLPARSER_EOF"] = FAILURE(1000)
errors["NS_ERROR_HTMLPARSER_UNKNOWN"] = FAILURE(1001)
@@ -503,17 +500,16 @@ with modules["HTMLPARSER"]:
errors["NS_ERROR_HTMLPARSER_HIERARCHYTOODEEP"] = FAILURE(1017)
errors["NS_ERROR_HTMLPARSER_FAKE_ENDTAG"] = FAILURE(1018)
errors["NS_ERROR_HTMLPARSER_INVALID_COMMENT"] = FAILURE(1019)
errors["NS_HTMLTOKENS_NOT_AN_ENTITY"] = SUCCESS(2000)
errors["NS_HTMLPARSER_VALID_META_CHARSET"] = SUCCESS(3000)
-
# =======================================================================
# 10: NS_ERROR_MODULE_RDF
# =======================================================================
with modules["RDF"]:
# Returned from nsIRDFDataSource::Assert() and Unassert() if the assertion
# (or unassertion was accepted by the datasource
errors["NS_RDF_ASSERTION_ACCEPTED"] = errors["NS_OK"]
# Returned from nsIRDFCursor::Advance() if the cursor has no more
@@ -525,17 +521,16 @@ with modules["RDF"]:
# Returned from nsIRDFDataSource::Assert() and Unassert() if the assertion
# (or unassertion) was rejected by the datasource; i.e., the datasource was
# not willing to record the statement.
errors["NS_RDF_ASSERTION_REJECTED"] = SUCCESS(3)
# Return this from rdfITripleVisitor to stop cycling
errors["NS_RDF_STOP_VISIT"] = SUCCESS(4)
-
# =======================================================================
# 11: NS_ERROR_MODULE_UCONV
# =======================================================================
with modules["UCONV"]:
errors["NS_ERROR_UCONV_NOCONV"] = FAILURE(1)
errors["NS_ERROR_UDEC_ILLEGALINPUT"] = FAILURE(14)
errors["NS_OK_HAD_REPLACEMENTS"] = SUCCESS(3)
@@ -552,17 +547,16 @@ with modules["UCONV"]:
# BEGIN DEPRECATED
errors["NS_EXACT_LENGTH"] = errors["NS_OK_UDEC_EXACTLENGTH"]
errors["NS_PARTIAL_MORE_INPUT"] = errors["NS_OK_UDEC_MOREINPUT"]
errors["NS_PARTIAL_MORE_OUTPUT"] = errors["NS_OK_UDEC_MOREOUTPUT"]
errors["NS_ERROR_ILLEGAL_INPUT"] = errors["NS_ERROR_UDEC_ILLEGALINPUT"]
# END DEPRECATED
-
# =======================================================================
# 13: NS_ERROR_MODULE_FILES
# =======================================================================
with modules["FILES"]:
errors["NS_ERROR_FILE_UNRECOGNIZED_PATH"] = FAILURE(1)
errors["NS_ERROR_FILE_UNRESOLVABLE_SYMLINK"] = FAILURE(2)
errors["NS_ERROR_FILE_EXECUTION_FAILED"] = FAILURE(3)
errors["NS_ERROR_FILE_UNKNOWN_TYPE"] = FAILURE(4)
@@ -584,17 +578,16 @@ with modules["FILES"]:
errors["NS_ERROR_FILE_DIR_NOT_EMPTY"] = FAILURE(20)
errors["NS_ERROR_FILE_ACCESS_DENIED"] = FAILURE(21)
errors["NS_SUCCESS_FILE_DIRECTORY_EMPTY"] = SUCCESS(1)
# Result codes used by nsIDirectoryServiceProvider2
errors["NS_SUCCESS_AGGREGATE_RESULT"] = SUCCESS(2)
-
# =======================================================================
# 14: NS_ERROR_MODULE_DOM
# =======================================================================
with modules["DOM"]:
# XXX If you add a new DOM error code, also add an error string to
# dom/base/domerr.msg
# Standard DOM error codes: http://dvcs.w3.org/hg/domcore/raw-file/tip/Overview.html
@@ -654,19 +647,22 @@ with modules["DOM"]:
errors["NS_ERROR_DOM_MALFORMED_URI"] = FAILURE(1016)
errors["NS_ERROR_DOM_INVALID_HEADER_NAME"] = FAILURE(1017)
errors["NS_ERROR_DOM_INVALID_STATE_XHR_HAS_INVALID_CONTEXT"] = FAILURE(1018)
errors["NS_ERROR_DOM_INVALID_STATE_XHR_MUST_BE_OPENED"] = FAILURE(1019)
errors["NS_ERROR_DOM_INVALID_STATE_XHR_MUST_NOT_BE_SENDING"] = FAILURE(1020)
errors["NS_ERROR_DOM_INVALID_STATE_XHR_MUST_NOT_BE_LOADING_OR_DONE"] = FAILURE(1021)
errors["NS_ERROR_DOM_INVALID_STATE_XHR_HAS_WRONG_RESPONSETYPE_FOR_RESPONSEXML"] = FAILURE(1022)
- errors["NS_ERROR_DOM_INVALID_STATE_XHR_HAS_WRONG_RESPONSETYPE_FOR_RESPONSETEXT"] = FAILURE(1023)
- errors["NS_ERROR_DOM_INVALID_STATE_XHR_CHUNKED_RESPONSETYPES_UNSUPPORTED_FOR_SYNC"] = FAILURE(1024)
- errors["NS_ERROR_DOM_INVALID_ACCESS_XHR_TIMEOUT_AND_RESPONSETYPE_UNSUPPORTED_FOR_SYNC"] = FAILURE(1025)
+ errors["NS_ERROR_DOM_INVALID_STATE_XHR_HAS_WRONG_RESPONSETYPE_FOR_RESPONSETEXT"] = FAILURE(
+ 1023)
+ errors["NS_ERROR_DOM_INVALID_STATE_XHR_CHUNKED_RESPONSETYPES_UNSUPPORTED_FOR_SYNC"] = FAILURE(
+ 1024)
+ errors["NS_ERROR_DOM_INVALID_ACCESS_XHR_TIMEOUT_AND_RESPONSETYPE_UNSUPPORTED_FOR_SYNC"] = FAILURE(
+ 1025)
# When manipulating the bytecode cache with the JS API, some transcoding
# errors, such as a different bytecode format can cause failures of the
# decoding process.
errors["NS_ERROR_DOM_JS_DECODING_ERROR"] = FAILURE(1026)
# May be used to indicate when e.g. setting a property value didn't
# actually change the value, like for obj.foo = "bar"; obj.foo = "bar";
@@ -679,41 +675,38 @@ with modules["DOM"]:
errors["NS_SUCCESS_DOM_SCRIPT_EVALUATION_THREW"] = SUCCESS(2)
# A success code that indicates that evaluating a string of JS went
# just fine except it was killed by an uncatchable exception.
# Only for legacy use by nsJSUtils.
errors["NS_SUCCESS_DOM_SCRIPT_EVALUATION_THREW_UNCATCHABLE"] = SUCCESS(3)
-
# =======================================================================
# 15: NS_ERROR_MODULE_IMGLIB
# =======================================================================
with modules["IMGLIB"]:
errors["NS_IMAGELIB_SUCCESS_LOAD_FINISHED"] = SUCCESS(0)
errors["NS_IMAGELIB_ERROR_FAILURE"] = FAILURE(5)
errors["NS_IMAGELIB_ERROR_NO_DECODER"] = FAILURE(6)
errors["NS_IMAGELIB_ERROR_NOT_FINISHED"] = FAILURE(7)
errors["NS_IMAGELIB_ERROR_NO_ENCODER"] = FAILURE(9)
-
# =======================================================================
# 17: NS_ERROR_MODULE_EDITOR
# =======================================================================
with modules["EDITOR"]:
errors["NS_ERROR_EDITOR_DESTROYED"] = FAILURE(1)
errors["NS_SUCCESS_EDITOR_ELEMENT_NOT_FOUND"] = SUCCESS(1)
errors["NS_SUCCESS_EDITOR_FOUND_TARGET"] = SUCCESS(2)
-
# =======================================================================
# 18: NS_ERROR_MODULE_XPCONNECT
# =======================================================================
with modules["XPCONNECT"]:
errors["NS_ERROR_XPC_NOT_ENOUGH_ARGS"] = FAILURE(1)
errors["NS_ERROR_XPC_NEED_OUT_OBJECT"] = FAILURE(2)
errors["NS_ERROR_XPC_CANT_SET_OUT_VAL"] = FAILURE(3)
errors["NS_ERROR_XPC_NATIVE_RETURNED_FAILURE"] = FAILURE(4)
@@ -765,25 +758,23 @@ with modules["XPCONNECT"]:
errors["NS_ERROR_XPC_BAD_INITIALIZER_NAME"] = FAILURE(50)
errors["NS_ERROR_XPC_HAS_BEEN_SHUTDOWN"] = FAILURE(51)
errors["NS_ERROR_XPC_CANT_MODIFY_PROP_ON_WN"] = FAILURE(52)
errors["NS_ERROR_XPC_BAD_CONVERT_JS_ZERO_ISNOT_NULL"] = FAILURE(53)
errors["NS_ERROR_XPC_CANT_PASS_CPOW_TO_NATIVE"] = FAILURE(54)
# any new errors here should have an associated entry added in xpc.msg
-
# =======================================================================
# 19: NS_ERROR_MODULE_PROFILE
# =======================================================================
with modules["PROFILE"]:
errors["NS_ERROR_LAUNCHED_CHILD_PROCESS"] = FAILURE(200)
-
# =======================================================================
# 21: NS_ERROR_MODULE_SECURITY
# =======================================================================
with modules["SECURITY"]:
# Error code for CSP
errors["NS_ERROR_CSP_FORM_ACTION_VIOLATION"] = FAILURE(98)
errors["NS_ERROR_CSP_FRAME_ANCESTOR_VIOLATION"] = FAILURE(99)
@@ -810,27 +801,25 @@ with modules["SECURITY"]:
errors["NS_ERROR_CMS_VERIFY_MALFORMED_SIGNATURE"] = FAILURE(1037)
errors["NS_ERROR_CMS_VERIFY_HEADER_MISMATCH"] = FAILURE(1038)
errors["NS_ERROR_CMS_VERIFY_NOT_YET_ATTEMPTED"] = FAILURE(1039)
errors["NS_ERROR_CMS_VERIFY_CERT_WITHOUT_ADDRESS"] = FAILURE(1040)
errors["NS_ERROR_CMS_ENCRYPT_NO_BULK_ALG"] = FAILURE(1056)
errors["NS_ERROR_CMS_ENCRYPT_INCOMPLETE"] = FAILURE(1057)
-
# =======================================================================
# 22: NS_ERROR_MODULE_DOM_XPATH
# =======================================================================
with modules["DOM_XPATH"]:
# DOM error codes from http://www.w3.org/TR/DOM-Level-3-XPath/
errors["NS_ERROR_DOM_INVALID_EXPRESSION_ERR"] = FAILURE(51)
errors["NS_ERROR_DOM_TYPE_ERR"] = FAILURE(52)
-
# =======================================================================
# 24: NS_ERROR_MODULE_URILOADER
# =======================================================================
with modules["URILOADER"]:
errors["NS_ERROR_WONT_HANDLE_CONTENT"] = FAILURE(1)
# The load has been cancelled because it was found on a malware or phishing
# blacklist.
errors["NS_ERROR_MALWARE_URI"] = FAILURE(30)
@@ -846,17 +835,16 @@ with modules["URILOADER"]:
# doesn't need to be reparsed from the original source.
errors["NS_ERROR_PARSED_DATA_CACHED"] = FAILURE(33)
# This success code indicates that a refresh header was found and
# successfully setup.
errors["NS_REFRESHURI_HEADER_FOUND"] = SUCCESS(2)
-
# =======================================================================
# 25: NS_ERROR_MODULE_CONTENT
# =======================================================================
with modules["CONTENT"]:
# Error codes for content policy blocking
errors["NS_ERROR_CONTENT_BLOCKED"] = FAILURE(6)
errors["NS_ERROR_CONTENT_BLOCKED_SHOW_ALT"] = FAILURE(7)
# Success variations of content policy blocking
@@ -875,17 +863,16 @@ with modules["CONTENT"]:
errors["NS_CONTENT_BLOCKED_SHOW_ALT"] = SUCCESS(9)
errors["NS_PROPTABLE_PROP_OVERWRITTEN"] = SUCCESS(11)
# Error codes for FindBroadcaster in XULDocument.cpp
errors["NS_FINDBROADCASTER_NOT_FOUND"] = SUCCESS(12)
errors["NS_FINDBROADCASTER_FOUND"] = SUCCESS(13)
errors["NS_FINDBROADCASTER_AWAIT_OVERLAYS"] = SUCCESS(14)
-
# =======================================================================
# 27: NS_ERROR_MODULE_XSLT
# =======================================================================
with modules["XSLT"]:
errors["NS_ERROR_XPATH_INVALID_ARG"] = errors["NS_ERROR_INVALID_ARG"]
errors["NS_ERROR_XSLT_PARSE_FAILURE"] = FAILURE(1)
errors["NS_ERROR_XPATH_PARSE_FAILURE"] = FAILURE(2)
@@ -918,17 +905,16 @@ with modules["XSLT"]:
errors["NS_ERROR_XPATH_UNBALANCED_CURLY_BRACE"] = FAILURE(29)
errors["NS_ERROR_XSLT_BAD_NODE_NAME"] = FAILURE(30)
errors["NS_ERROR_XSLT_VAR_ALREADY_SET"] = FAILURE(31)
errors["NS_ERROR_XSLT_CALL_TO_KEY_NOT_ALLOWED"] = FAILURE(32)
errors["NS_XSLT_GET_NEW_HANDLER"] = SUCCESS(1)
-
# =======================================================================
# 28: NS_ERROR_MODULE_IPC
# =======================================================================
with modules["IPC"]:
# Initial creation of a Transport object failed internally for unknown reasons.
errors["NS_ERROR_TRANSPORT_INIT"] = FAILURE(1)
# Generic error related to duplicating handle failures.
errors["NS_ERROR_DUPLICATE_HANDLE"] = FAILURE(2)
@@ -943,17 +929,16 @@ with modules["IPC"]:
# =======================================================================
with modules["SVG"]:
# SVG DOM error codes from http://www.w3.org/TR/SVG11/svgdom.html
errors["NS_ERROR_DOM_SVG_WRONG_TYPE_ERR"] = FAILURE(0)
# Yes, the spec says "INVERTABLE", not "INVERTIBLE"
errors["NS_ERROR_DOM_SVG_MATRIX_NOT_INVERTABLE"] = FAILURE(2)
-
# =======================================================================
# 30: NS_ERROR_MODULE_STORAGE
# =======================================================================
with modules["STORAGE"]:
# To add additional errors to Storage, please append entries to the bottom
# of the list in the following format:
# NS_ERROR_STORAGE_YOUR_ERR, FAILURE(n)
# where n is the next unique positive integer. You must also add an entry
@@ -961,27 +946,25 @@ with modules["STORAGE"]:
# comment 'storage related codes (from mozStorage.h)', in the following
# format: 'XPC_MSG_DEF(NS_ERROR_STORAGE_YOUR_ERR, "brief description of your
# error")'
errors["NS_ERROR_STORAGE_BUSY"] = FAILURE(1)
errors["NS_ERROR_STORAGE_IOERR"] = FAILURE(2)
errors["NS_ERROR_STORAGE_CONSTRAINT"] = FAILURE(3)
-
# =======================================================================
# 32: NS_ERROR_MODULE_DOM_FILE
# =======================================================================
with modules["DOM_FILE"]:
errors["NS_ERROR_DOM_FILE_NOT_FOUND_ERR"] = FAILURE(0)
errors["NS_ERROR_DOM_FILE_NOT_READABLE_ERR"] = FAILURE(1)
errors["NS_ERROR_DOM_FILE_ABORT_ERR"] = FAILURE(2)
-
# =======================================================================
# 33: NS_ERROR_MODULE_DOM_INDEXEDDB
# =======================================================================
with modules["DOM_INDEXEDDB"]:
# IndexedDB error codes http://dvcs.w3.org/hg/IndexedDB/raw-file/tip/Overview.html
errors["NS_ERROR_DOM_INDEXEDDB_UNKNOWN_ERR"] = FAILURE(1)
errors["NS_ERROR_DOM_INDEXEDDB_NOT_FOUND_ERR"] = FAILURE(3)
errors["NS_ERROR_DOM_INDEXEDDB_CONSTRAINT_ERR"] = FAILURE(4)
@@ -991,17 +974,16 @@ with modules["DOM_INDEXEDDB"]:
errors["NS_ERROR_DOM_INDEXEDDB_ABORT_ERR"] = FAILURE(8)
errors["NS_ERROR_DOM_INDEXEDDB_READ_ONLY_ERR"] = FAILURE(9)
errors["NS_ERROR_DOM_INDEXEDDB_TIMEOUT_ERR"] = FAILURE(10)
errors["NS_ERROR_DOM_INDEXEDDB_QUOTA_ERR"] = FAILURE(11)
errors["NS_ERROR_DOM_INDEXEDDB_VERSION_ERR"] = FAILURE(12)
errors["NS_ERROR_DOM_INDEXEDDB_RECOVERABLE_ERR"] = FAILURE(1001)
-
# =======================================================================
# 34: NS_ERROR_MODULE_DOM_FILEHANDLE
# =======================================================================
with modules["DOM_FILEHANDLE"]:
errors["NS_ERROR_DOM_FILEHANDLE_UNKNOWN_ERR"] = FAILURE(1)
errors["NS_ERROR_DOM_FILEHANDLE_NOT_ALLOWED_ERR"] = FAILURE(2)
errors["NS_ERROR_DOM_FILEHANDLE_INACTIVE_ERR"] = FAILURE(3)
errors["NS_ERROR_DOM_FILEHANDLE_ABORT_ERR"] = FAILURE(4)
@@ -1150,17 +1132,16 @@ with modules["GENERAL"]:
errors["NS_SUCCESS_DONT_FIXUP"] = SUCCESS(1)
# This success code may be returned by nsIAppStartup::Run to indicate that
# the application should be restarted. This condition corresponds to the
# case in which nsIAppStartup::Quit was called with the eRestart flag.
errors["NS_SUCCESS_RESTART_APP"] = SUCCESS(1)
errors["NS_SUCCESS_RESTART_APP_NOT_SAME_PROFILE"] = SUCCESS(3)
errors["NS_SUCCESS_UNORM_NOTFOUND"] = SUCCESS(17)
-
# a11y
# raised when current pivot's position is needed but it's not in the tree
errors["NS_ERROR_NOT_IN_TREE"] = FAILURE(38)
# see nsTextEquivUtils
errors["NS_OK_NO_NAME_CLAUSE_HANDLED"] = SUCCESS(34)
# Error code used to indicate that functionality has been blocked by the
@@ -1204,16 +1185,17 @@ enum class nsresult : uint32_t
output.write("""
const nsresult
{}
;
#endif // ErrorList_h__
""".format(",\n".join(items)))
+
def error_names_internal_h(output):
"""Generate ErrorNamesInternal.h, which is a header file declaring one
function, const char* GetErrorNameInternal(nsresult). This method is not
intended to be used by consumer code, which should instead call
GetErrorName in ErrorNames.h."""
output.write("""
/* THIS FILE IS GENERATED BY ErrorList.py - DO NOT EDIT */
--- a/xpcom/build/Services.py
+++ b/xpcom/build/Services.py
@@ -1,15 +1,18 @@
#!/usr/bin/env python
services = []
+
+
def service(name, iface, contractid):
"""Define a convenient service getter"""
services.append((name, iface, contractid))
+
service('ChromeRegistryService', 'nsIChromeRegistry',
"@mozilla.org/chrome/chrome-registry;1")
service('ToolkitChromeRegistryService', 'nsIToolkitChromeRegistry',
"@mozilla.org/chrome/chrome-registry;1")
service('XULChromeRegistryService', 'nsIXULChromeRegistry',
"@mozilla.org/chrome/chrome-registry;1")
service('XULOverlayProviderService', 'nsIXULOverlayProvider',
"@mozilla.org/chrome/chrome-registry;1")
@@ -79,16 +82,17 @@ CPP_INCLUDES = """
# Codegen Logic
#
# The following code consumes the data listed above to generate the files
# Services.h, Services.cpp, and services.rs which provide access to these
# service getters in both rust and C++ code.
#
# XXX(nika): would it be a good idea to unify Services.jsm into here too?
+
def services_h(output):
output.write("""\
/* THIS FILE IS GENERATED BY Services.py - DO NOT EDIT */
#ifndef mozilla_Services_h
#define mozilla_Services_h
#include "nscore.h"
@@ -125,19 +129,19 @@ inline already_AddRefed<%(type)s>
Get%(name)s()
{
return already_AddRefed<%(type)s>(XPCOMService_Get%(name)s());
}
} // namespace services
} // namespace mozilla
#endif // defined(MOZILLA_INTERNAL_API)
""" % {
- 'name': name,
- 'type': iface,
-})
+ 'name': name,
+ 'type': iface,
+ })
output.write("#endif // !defined(mozilla_Services_h)\n")
def services_cpp(output):
output.write("""\
/* THIS FILE IS GENERATED BY Services.py - DO NOT EDIT */
""")
@@ -162,20 +166,20 @@ XPCOMService_Get%(name)s()
if (!g%(name)s) {
nsCOMPtr<%(type)s> os = do_GetService("%(contractid)s");
os.swap(g%(name)s);
}
return do_AddRef(g%(name)s).take();
}
} // extern "C"
""" % {
- 'name': name,
- 'type': iface,
- 'contractid': contractid,
-})
+ 'name': name,
+ 'type': iface,
+ 'contractid': contractid,
+ })
output.write("""
/**
* Clears service cache, sets gXPCOMShuttingDown
*/
void
mozilla::services::Shutdown()
{
@@ -203,11 +207,11 @@ use RefPtr;
/// This function will return `None` during XPCOM shutdown.
pub fn get_%(name)s() -> Option<RefPtr<::interfaces::%(type)s>> {
extern "C" {
fn XPCOMService_Get%(name)s() -> *mut ::interfaces::%(type)s;
}
unsafe { RefPtr::from_raw_dont_addref(XPCOMService_Get%(name)s()) }
}
""" % {
- 'name': name,
- 'type': iface,
-})
+ 'name': name,
+ 'type': iface,
+ })
--- a/xpcom/ds/make_dafsa.py
+++ b/xpcom/ds/make_dafsa.py
@@ -189,294 +189,296 @@ The bytes in the generated array has the
8: 0x62 <char> label character 0x62 -> match "b"
9: 0x62 <char> label character 0x62 -> match "b"
10: 0x82 <return_value> 0x82 & 0x0F -> return 2
"""
import sys
+
class InputError(Exception):
- """Exception raised for errors in the input file."""
+ """Exception raised for errors in the input file."""
def to_dafsa(words):
- """Generates a DAFSA from a word list and returns the source node.
+ """Generates a DAFSA from a word list and returns the source node.
+
+ Each word is split into characters so that each character is represented by
+ a unique node. It is assumed the word list is not empty.
+ """
+ if not words:
+ raise InputError('The domain list must not be empty')
- Each word is split into characters so that each character is represented by
- a unique node. It is assumed the word list is not empty.
- """
- if not words:
- raise InputError('The domain list must not be empty')
- def ToNodes(word):
- """Split words into characters"""
- if not 0x1F < ord(word[0]) < 0x80:
- raise InputError('Domain names must be printable 7-bit ASCII')
- if len(word) == 1:
- return chr(ord(word[0]) & 0x0F), [None]
- return word[0], [ToNodes(word[1:])]
- return [ToNodes(word) for word in words]
+ def ToNodes(word):
+ """Split words into characters"""
+ if not 0x1F < ord(word[0]) < 0x80:
+ raise InputError('Domain names must be printable 7-bit ASCII')
+ if len(word) == 1:
+ return chr(ord(word[0]) & 0x0F), [None]
+ return word[0], [ToNodes(word[1:])]
+ return [ToNodes(word) for word in words]
def to_words(node):
- """Generates a word list from all paths starting from an internal node."""
- if not node:
- return ['']
- return [(node[0] + word) for child in node[1] for word in to_words(child)]
+ """Generates a word list from all paths starting from an internal node."""
+ if not node:
+ return ['']
+ return [(node[0] + word) for child in node[1] for word in to_words(child)]
def reverse(dafsa):
- """Generates a new DAFSA that is reversed, so that the old sink node becomes
- the new source node.
- """
- sink = []
- nodemap = {}
-
- def dfs(node, parent):
- """Creates reverse nodes.
-
- A new reverse node will be created for each old node. The new node will
- get a reversed label and the parents of the old node as children.
+ """Generates a new DAFSA that is reversed, so that the old sink node becomes
+ the new source node.
"""
- if not node:
- sink.append(parent)
- elif id(node) not in nodemap:
- nodemap[id(node)] = (node[0][::-1], [parent])
- for child in node[1]:
- dfs(child, nodemap[id(node)])
- else:
- nodemap[id(node)][1].append(parent)
+ sink = []
+ nodemap = {}
+
+ def dfs(node, parent):
+ """Creates reverse nodes.
- for node in dafsa:
- dfs(node, None)
- return sink
+ A new reverse node will be created for each old node. The new node will
+ get a reversed label and the parents of the old node as children.
+ """
+ if not node:
+ sink.append(parent)
+ elif id(node) not in nodemap:
+ nodemap[id(node)] = (node[0][::-1], [parent])
+ for child in node[1]:
+ dfs(child, nodemap[id(node)])
+ else:
+ nodemap[id(node)][1].append(parent)
+
+ for node in dafsa:
+ dfs(node, None)
+ return sink
def join_labels(dafsa):
- """Generates a new DAFSA where internal nodes are merged if there is a one to
- one connection.
- """
- parentcount = { id(None): 2 }
- nodemap = { id(None): None }
+ """Generates a new DAFSA where internal nodes are merged if there is a one to
+ one connection.
+ """
+ parentcount = {id(None): 2}
+ nodemap = {id(None): None}
- def count_parents(node):
- """Count incoming references"""
- if id(node) in parentcount:
- parentcount[id(node)] += 1
- else:
- parentcount[id(node)] = 1
- for child in node[1]:
- count_parents(child)
+ def count_parents(node):
+ """Count incoming references"""
+ if id(node) in parentcount:
+ parentcount[id(node)] += 1
+ else:
+ parentcount[id(node)] = 1
+ for child in node[1]:
+ count_parents(child)
- def join(node):
- """Create new nodes"""
- if id(node) not in nodemap:
- children = [join(child) for child in node[1]]
- if len(children) == 1 and parentcount[id(node[1][0])] == 1:
- child = children[0]
- nodemap[id(node)] = (node[0] + child[0], child[1])
- else:
- nodemap[id(node)] = (node[0], children)
- return nodemap[id(node)]
+ def join(node):
+ """Create new nodes"""
+ if id(node) not in nodemap:
+ children = [join(child) for child in node[1]]
+ if len(children) == 1 and parentcount[id(node[1][0])] == 1:
+ child = children[0]
+ nodemap[id(node)] = (node[0] + child[0], child[1])
+ else:
+ nodemap[id(node)] = (node[0], children)
+ return nodemap[id(node)]
- for node in dafsa:
- count_parents(node)
- return [join(node) for node in dafsa]
+ for node in dafsa:
+ count_parents(node)
+ return [join(node) for node in dafsa]
def join_suffixes(dafsa):
- """Generates a new DAFSA where nodes that represent the same word lists
- towards the sink are merged.
- """
- nodemap = { frozenset(('',)): None }
+ """Generates a new DAFSA where nodes that represent the same word lists
+ towards the sink are merged.
+ """
+ nodemap = {frozenset(('',)): None}
- def join(node):
- """Returns a macthing node. A new node is created if no matching node
- exists. The graph is accessed in dfs order.
- """
- suffixes = frozenset(to_words(node))
- if suffixes not in nodemap:
- nodemap[suffixes] = (node[0], [join(child) for child in node[1]])
- return nodemap[suffixes]
+ def join(node):
+        """Returns a matching node. A new node is created if no matching node
+ exists. The graph is accessed in dfs order.
+ """
+ suffixes = frozenset(to_words(node))
+ if suffixes not in nodemap:
+ nodemap[suffixes] = (node[0], [join(child) for child in node[1]])
+ return nodemap[suffixes]
- return [join(node) for node in dafsa]
+ return [join(node) for node in dafsa]
def top_sort(dafsa):
- """Generates list of nodes in topological sort order."""
- incoming = {}
+ """Generates list of nodes in topological sort order."""
+ incoming = {}
- def count_incoming(node):
- """Counts incoming references."""
- if node:
- if id(node) not in incoming:
- incoming[id(node)] = 1
- for child in node[1]:
- count_incoming(child)
- else:
- incoming[id(node)] += 1
+ def count_incoming(node):
+ """Counts incoming references."""
+ if node:
+ if id(node) not in incoming:
+ incoming[id(node)] = 1
+ for child in node[1]:
+ count_incoming(child)
+ else:
+ incoming[id(node)] += 1
- for node in dafsa:
- count_incoming(node)
+ for node in dafsa:
+ count_incoming(node)
- for node in dafsa:
- incoming[id(node)] -= 1
+ for node in dafsa:
+ incoming[id(node)] -= 1
- waiting = [node for node in dafsa if incoming[id(node)] == 0]
- nodes = []
+ waiting = [node for node in dafsa if incoming[id(node)] == 0]
+ nodes = []
- while waiting:
- node = waiting.pop()
- assert incoming[id(node)] == 0
- nodes.append(node)
- for child in node[1]:
- if child:
- incoming[id(child)] -= 1
- if incoming[id(child)] == 0:
- waiting.append(child)
- return nodes
+ while waiting:
+ node = waiting.pop()
+ assert incoming[id(node)] == 0
+ nodes.append(node)
+ for child in node[1]:
+ if child:
+ incoming[id(child)] -= 1
+ if incoming[id(child)] == 0:
+ waiting.append(child)
+ return nodes
def encode_links(children, offsets, current):
- """Encodes a list of children as one, two or three byte offsets."""
- if not children[0]:
- # This is an <end_label> node and no links follow such nodes
- assert len(children) == 1
- return []
- guess = 3 * len(children)
- assert children
- children = sorted(children, key = lambda x: -offsets[id(x)])
- while True:
- offset = current + guess
- buf = []
- for child in children:
- last = len(buf)
- distance = offset - offsets[id(child)]
- assert distance > 0 and distance < (1 << 21)
+ """Encodes a list of children as one, two or three byte offsets."""
+ if not children[0]:
+ # This is an <end_label> node and no links follow such nodes
+ assert len(children) == 1
+ return []
+ guess = 3 * len(children)
+ assert children
+ children = sorted(children, key=lambda x: -offsets[id(x)])
+ while True:
+ offset = current + guess
+ buf = []
+ for child in children:
+ last = len(buf)
+ distance = offset - offsets[id(child)]
+ assert distance > 0 and distance < (1 << 21)
- if distance < (1 << 6):
- # A 6-bit offset: "s0xxxxxx"
- buf.append(distance)
- elif distance < (1 << 13):
- # A 13-bit offset: "s10xxxxxxxxxxxxx"
- buf.append(0x40 | (distance >> 8))
- buf.append(distance & 0xFF)
- else:
- # A 21-bit offset: "s11xxxxxxxxxxxxxxxxxxxxx"
- buf.append(0x60 | (distance >> 16))
- buf.append((distance >> 8) & 0xFF)
- buf.append(distance & 0xFF)
- # Distance in first link is relative to following record.
- # Distance in other links are relative to previous link.
- offset -= distance
- if len(buf) == guess:
- break
- guess = len(buf)
- # Set most significant bit to mark end of links in this node.
- buf[last] |= (1 << 7)
- buf.reverse()
- return buf
+ if distance < (1 << 6):
+ # A 6-bit offset: "s0xxxxxx"
+ buf.append(distance)
+ elif distance < (1 << 13):
+ # A 13-bit offset: "s10xxxxxxxxxxxxx"
+ buf.append(0x40 | (distance >> 8))
+ buf.append(distance & 0xFF)
+ else:
+ # A 21-bit offset: "s11xxxxxxxxxxxxxxxxxxxxx"
+ buf.append(0x60 | (distance >> 16))
+ buf.append((distance >> 8) & 0xFF)
+ buf.append(distance & 0xFF)
+ # Distance in first link is relative to following record.
+ # Distance in other links are relative to previous link.
+ offset -= distance
+ if len(buf) == guess:
+ break
+ guess = len(buf)
+ # Set most significant bit to mark end of links in this node.
+ buf[last] |= (1 << 7)
+ buf.reverse()
+ return buf
def encode_prefix(label):
- """Encodes a node label as a list of bytes without a trailing high byte.
+ """Encodes a node label as a list of bytes without a trailing high byte.
- This method encodes a node if there is exactly one child and the
- child follows immidiately after so that no jump is needed. This label
- will then be a prefix to the label in the child node.
- """
- assert label
- return [ord(c) for c in reversed(label)]
+ This method encodes a node if there is exactly one child and the
+    child follows immediately after so that no jump is needed. This label
+ will then be a prefix to the label in the child node.
+ """
+ assert label
+ return [ord(c) for c in reversed(label)]
def encode_label(label):
- """Encodes a node label as a list of bytes with a trailing high byte >0x80.
- """
- buf = encode_prefix(label)
- # Set most significant bit to mark end of label in this node.
- buf[0] |= (1 << 7)
- return buf
+ """Encodes a node label as a list of bytes with a trailing high byte >0x80.
+ """
+ buf = encode_prefix(label)
+ # Set most significant bit to mark end of label in this node.
+ buf[0] |= (1 << 7)
+ return buf
def encode(dafsa):
- """Encodes a DAFSA to a list of bytes"""
- output = []
- offsets = {}
+ """Encodes a DAFSA to a list of bytes"""
+ output = []
+ offsets = {}
- for node in reversed(top_sort(dafsa)):
- if (len(node[1]) == 1 and node[1][0] and
- (offsets[id(node[1][0])] == len(output))):
- output.extend(encode_prefix(node[0]))
- else:
- output.extend(encode_links(node[1], offsets, len(output)))
- output.extend(encode_label(node[0]))
- offsets[id(node)] = len(output)
+ for node in reversed(top_sort(dafsa)):
+ if (len(node[1]) == 1 and node[1][0] and
+ (offsets[id(node[1][0])] == len(output))):
+ output.extend(encode_prefix(node[0]))
+ else:
+ output.extend(encode_links(node[1], offsets, len(output)))
+ output.extend(encode_label(node[0]))
+ offsets[id(node)] = len(output)
- output.extend(encode_links(dafsa, offsets, len(output)))
- output.reverse()
- return output
+ output.extend(encode_links(dafsa, offsets, len(output)))
+ output.reverse()
+ return output
def to_cxx(data, preamble=None):
- """Generates C++ code from a list of encoded bytes."""
- text = '/* This file is generated. DO NOT EDIT!\n\n'
- text += 'The byte array encodes a dictionary of strings and values. See '
- text += 'make_dafsa.py for documentation.'
- text += '*/\n\n'
+ """Generates C++ code from a list of encoded bytes."""
+ text = '/* This file is generated. DO NOT EDIT!\n\n'
+ text += 'The byte array encodes a dictionary of strings and values. See '
+ text += 'make_dafsa.py for documentation.'
+ text += '*/\n\n'
- if preamble:
- text += preamble
- text += '\n\n'
+ if preamble:
+ text += preamble
+ text += '\n\n'
- text += 'const unsigned char kDafsa[%s] = {\n' % len(data)
- for i in range(0, len(data), 12):
- text += ' '
- text += ', '.join('0x%02x' % byte for byte in data[i:i + 12])
- text += ',\n'
- text += '};\n'
- return text
+ text += 'const unsigned char kDafsa[%s] = {\n' % len(data)
+ for i in range(0, len(data), 12):
+ text += ' '
+ text += ', '.join('0x%02x' % byte for byte in data[i:i + 12])
+ text += ',\n'
+ text += '};\n'
+ return text
def words_to_cxx(words, preamble=None):
- """Generates C++ code from a word list"""
- dafsa = to_dafsa(words)
- for fun in (reverse, join_suffixes, reverse, join_suffixes, join_labels):
- dafsa = fun(dafsa)
- return to_cxx(encode(dafsa), preamble)
+ """Generates C++ code from a word list"""
+ dafsa = to_dafsa(words)
+ for fun in (reverse, join_suffixes, reverse, join_suffixes, join_labels):
+ dafsa = fun(dafsa)
+ return to_cxx(encode(dafsa), preamble)
def parse_gperf(infile):
- """Parses gperf file and extract strings and return code"""
- lines = [line.strip() for line in infile]
+ """Parses gperf file and extract strings and return code"""
+ lines = [line.strip() for line in infile]
- # Extract the preamble.
- first_delimeter = lines.index('%%')
- preamble = '\n'.join(lines[0:first_delimeter])
+ # Extract the preamble.
+ first_delimeter = lines.index('%%')
+ preamble = '\n'.join(lines[0:first_delimeter])
- # Extract strings after the first '%%' and before the second '%%'.
- begin = first_delimeter + 1
- end = lines.index('%%', begin)
- lines = lines[begin:end]
- for line in lines:
- if line[-3:-1] != ', ':
- raise InputError('Expected "domainname, <digit>", found "%s"' % line)
- # Technically the DAFSA format could support return values in range [0-31],
- # but the values below are the only with a defined meaning.
- if line[-1] not in '0124':
- raise InputError('Expected value to be one of {0,1,2,4}, found "%s"' %
- line[-1])
- return (preamble, [line[:-3] + line[-1] for line in lines])
+ # Extract strings after the first '%%' and before the second '%%'.
+ begin = first_delimeter + 1
+ end = lines.index('%%', begin)
+ lines = lines[begin:end]
+ for line in lines:
+ if line[-3:-1] != ', ':
+ raise InputError('Expected "domainname, <digit>", found "%s"' % line)
+ # Technically the DAFSA format could support return values in range [0-31],
+        # but the values below are the only ones with a defined meaning.
+ if line[-1] not in '0124':
+ raise InputError('Expected value to be one of {0,1,2,4}, found "%s"' %
+ line[-1])
+ return (preamble, [line[:-3] + line[-1] for line in lines])
def main(outfile, infile):
- with open(infile, 'r') as infile:
- preamble, words = parse_gperf(infile)
- outfile.write(words_to_cxx(words, preamble))
- return 0
+ with open(infile, 'r') as infile:
+ preamble, words = parse_gperf(infile)
+ outfile.write(words_to_cxx(words, preamble))
+ return 0
if __name__ == '__main__':
- if len(sys.argv) != 3:
- print('usage: %s infile outfile' % sys.argv[0])
- sys.exit(1)
+ if len(sys.argv) != 3:
+ print('usage: %s infile outfile' % sys.argv[0])
+ sys.exit(1)
- with open(sys.argv[2], 'w') as outfile:
- sys.exit(main(outfile, sys.argv[1]))
+ with open(sys.argv[2], 'w') as outfile:
+ sys.exit(main(outfile, sys.argv[1]))
--- a/xpcom/idl-parser/xpidl/header.py
+++ b/xpcom/idl-parser/xpidl/header.py
@@ -60,21 +60,21 @@ def attributeParamlist(a, getter):
l = ["%s%s" % (a.realtype.nativeType(getter and 'out' or 'in'),
attributeParamName(a))]
if a.implicit_jscontext:
l.insert(0, "JSContext* cx")
return ", ".join(l)
-def attributeAsNative(a, getter, declType = 'NS_IMETHOD'):
- params = {'returntype': attributeReturnType(a, declType),
- 'binaryname': attributeNativeName(a, getter),
- 'paramlist': attributeParamlist(a, getter)}
- return "%(returntype)s %(binaryname)s(%(paramlist)s)" % params
+def attributeAsNative(a, getter, declType='NS_IMETHOD'):
+ params = {'returntype': attributeReturnType(a, declType),
+ 'binaryname': attributeNativeName(a, getter),
+ 'paramlist': attributeParamlist(a, getter)}
+ return "%(returntype)s %(binaryname)s(%(paramlist)s)" % params
def methodNativeName(m):
return m.binaryname is not None and m.binaryname or firstCap(m.name)
def methodReturnType(m, macro):
"""macro should be NS_IMETHOD or NS_IMETHODIMP"""
@@ -87,17 +87,17 @@ def methodReturnType(m, macro):
ret = "%s_(%s)" % (macro, m.realtype.nativeType('in').strip())
else:
ret = macro
if m.must_use:
ret = "MOZ_MUST_USE " + ret
return ret
-def methodAsNative(m, declType = 'NS_IMETHOD'):
+def methodAsNative(m, declType='NS_IMETHOD'):
return "%s %s(%s)" % (methodReturnType(m, declType),
methodNativeName(m),
paramlistAsNative(m))
def paramlistAsNative(m, empty='void'):
l = [paramAsNative(p) for p in m.params]
@@ -154,16 +154,17 @@ def paramlistNames(m):
if not m.notxpcom and m.realtype.name != 'void':
names.append('_retval')
if len(names) == 0:
return ''
return ', '.join(names)
+
header = """/*
* DO NOT EDIT. THIS FILE IS GENERATED FROM %(filename)s
*/
#ifndef __gen_%(basename)s_h__
#define __gen_%(basename)s_h__
"""
@@ -332,16 +333,17 @@ attr_refcnt_infallible_tmpl = """\
def write_interface(iface, fd):
if iface.namemap is None:
raise Exception("Interface was not resolved.")
# Confirm that no names of methods will overload in this interface
names = set()
+
def record_name(name):
if name in names:
raise Exception("Unexpected overloaded virtual method %s in interface %s"
% (name, iface.name))
names.add(name)
for m in iface.members:
if type(m) == xpidl.Attribute:
record_name(attributeNativeName(m, getter=True))
@@ -378,17 +380,17 @@ def write_interface(iface, fd):
fd.write(" %s = 0;\n" % attributeAsNative(a, True))
if a.infallible:
realtype = a.realtype.nativeType('in')
tmpl = attr_builtin_infallible_tmpl
if a.realtype.kind != 'builtin':
assert realtype.endswith(' *'), "bad infallible type"
tmpl = attr_refcnt_infallible_tmpl
- realtype = realtype[:-2] # strip trailing pointer
+ realtype = realtype[:-2] # strip trailing pointer
fd.write(tmpl % {'realtype': realtype,
'nativename': attributeNativeName(a, getter=True),
'args': '' if not a.implicit_jscontext else 'JSContext* cx',
'argnames': '' if not a.implicit_jscontext else 'cx, '})
if not a.readonly:
fd.write(" %s = 0;\n" % attributeAsNative(a, False))
@@ -448,39 +450,41 @@ def write_interface(iface, fd):
fd.write(iface_epilog % names)
def writeDeclaration(fd, iface, virtual):
declType = "NS_IMETHOD" if virtual else "nsresult"
suffix = " override" if virtual else ""
for member in iface.members:
if isinstance(member, xpidl.Attribute):
if member.infallible:
- fd.write("\\\n using %s::%s; " % (iface.name, attributeNativeName(member, True)))
+ fd.write("\\\n using %s::%s; " %
+ (iface.name, attributeNativeName(member, True)))
fd.write("\\\n %s%s; " % (attributeAsNative(member, True, declType), suffix))
if not member.readonly:
fd.write("\\\n %s%s; " % (attributeAsNative(member, False, declType), suffix))
elif isinstance(member, xpidl.Method):
fd.write("\\\n %s%s; " % (methodAsNative(member, declType), suffix))
if len(iface.members) == 0:
fd.write('\\\n /* no methods! */')
elif not member.kind in ('attribute', 'method'):
fd.write('\\')
- writeDeclaration(fd, iface, True);
+ writeDeclaration(fd, iface, True)
fd.write(iface_nonvirtual % names)
- writeDeclaration(fd, iface, False);
+ writeDeclaration(fd, iface, False)
fd.write(iface_forward % names)
def emitTemplate(forward_infallible, tmpl, tmpl_notxpcom=None):
if tmpl_notxpcom is None:
tmpl_notxpcom = tmpl
for member in iface.members:
if isinstance(member, xpidl.Attribute):
if forward_infallible and member.infallible:
- fd.write("\\\n using %s::%s; " % (iface.name, attributeNativeName(member, True)))
+ fd.write("\\\n using %s::%s; " %
+ (iface.name, attributeNativeName(member, True)))
fd.write(tmpl % {'asNative': attributeAsNative(member, True),
'nativeName': attributeNativeName(member, True),
'paramList': attributeParamNames(member)})
if not member.readonly:
fd.write(tmpl % {'asNative': attributeAsNative(member, False),
'nativeName': attributeNativeName(member, False),
'paramList': attributeParamNames(member)})
elif isinstance(member, xpidl.Method):
@@ -522,10 +526,11 @@ def main(outputfile):
# properly
for fileglobs in [os.path.join(cachedir, f) for f in ["xpidllex.py*", "xpidlyacc.py*"]]:
for filename in glob.glob(fileglobs):
os.remove(filename)
# Instantiate the parser.
p = xpidl.IDLParser(outputdir=cachedir)
+
if __name__ == '__main__':
main(None)
--- a/xpcom/idl-parser/xpidl/jsonxpt.py
+++ b/xpcom/idl-parser/xpidl/jsonxpt.py
@@ -53,17 +53,17 @@ def flags(*flags):
return [flag for flag, cond in flags if cond]
def get_type(type, calltype, iid_is=None, size_is=None):
while isinstance(type, xpidl.Typedef):
type = type.realtype
if isinstance(type, xpidl.Builtin):
- ret = { 'tag': TypeMap[type.name] }
+ ret = {'tag': TypeMap[type.name]}
if type.name in ['string', 'wstring'] and size_is is not None:
ret['tag'] += '_SIZE_IS'
ret['size_is'] = size_is
return ret
if isinstance(type, xpidl.Array):
# NB: For an Array<T> we pass down the iid_is to get the type of T.
# This allows Arrays of InterfaceIs types to work.
@@ -93,17 +93,17 @@ def get_type(type, calltype, iid_is=None
'tag': TypeMap[type.specialtype]
}
elif iid_is is not None:
return {
'tag': 'TD_INTERFACE_IS_TYPE',
'iid_is': iid_is,
}
else:
- return { 'tag': 'TD_VOID' }
+ return {'tag': 'TD_VOID'}
raise Exception("Unknown type!")
def mk_param(type, in_=0, out=0, optional=0):
return {
'type': type,
'flags': flags(
@@ -151,17 +151,17 @@ def build_interface(iface):
# State used while building an interface
consts = []
methods = []
def build_const(c):
consts.append({
'name': c.name,
'type': get_type(c.basetype, ''),
- 'value': c.getValue(), # All of our consts are numbers
+ 'value': c.getValue(), # All of our consts are numbers
})
def build_method(m):
params = []
for p in m.params:
params.append(mk_param(
get_type(
p.realtype, p.paramtype,
@@ -235,17 +235,21 @@ def build_typelib(idl):
if p.kind != 'interface':
return False
# Only export scriptable or shim interfaces
return p.attributes.scriptable or p.attributes.shim
return [build_interface(p) for p in idl.productions if exported(p)]
# Link a list of typelibs together into a single typelib
+
+
def link(typelibs):
linked = list(itertools.chain.from_iterable(typelibs))
assert len(set(iface['name'] for iface in linked)) == len(linked), \
"Multiple typelibs containing the same interface were linked together"
return linked
# Write the typelib into the fd file
+
+
def write(typelib, fd):
json.dump(typelib, fd, indent=2)
--- a/xpcom/idl-parser/xpidl/runtests.py
+++ b/xpcom/idl-parser/xpidl/runtests.py
@@ -28,17 +28,18 @@ class TestParser(unittest.TestCase):
def testInterface(self):
i = self.p.parse("[uuid(abc)] interface foo {};", filename='f')
self.assertTrue(isinstance(i, xpidl.IDL))
self.assertTrue(isinstance(i.productions[0], xpidl.Interface))
self.assertEqual("foo", i.productions[0].name)
def testAttributes(self):
- i = self.p.parse("[scriptable, builtinclass, function, uuid(abc)] interface foo {};", filename='f')
+ i = self.p.parse(
+ "[scriptable, builtinclass, function, uuid(abc)] interface foo {};", filename='f')
self.assertTrue(isinstance(i, xpidl.IDL))
self.assertTrue(isinstance(i.productions[0], xpidl.Interface))
iface = i.productions[0]
self.assertEqual("foo", iface.name)
self.assertTrue(iface.attributes.scriptable)
self.assertTrue(iface.attributes.builtinclass)
self.assertTrue(iface.attributes.function)
@@ -96,18 +97,21 @@ attribute long bar;
self.assertEqual("long", a.type)
def testOverloadedVirtual(self):
i = self.p.parse("""[uuid(abc)] interface foo {
attribute long bar;
void getBar();
};""", filename='f')
self.assertTrue(isinstance(i, xpidl.IDL))
+
class FdMock:
def write(self, s):
pass
try:
header.print_header(i, FdMock(), filename='f')
except Exception as e:
- self.assertEqual(e.args[0], "Unexpected overloaded virtual method GetBar in interface foo")
+ self.assertEqual(
+ e.args[0], "Unexpected overloaded virtual method GetBar in interface foo")
+
if __name__ == '__main__':
mozunit.main(runwith='unittest')
--- a/xpcom/idl-parser/xpidl/rust.py
+++ b/xpcom/idl-parser/xpidl/rust.py
@@ -202,16 +202,17 @@ pub %s: *const ::libc::c_void""" % (reas
method_impl_tmpl = """\
#[inline]
pub unsafe fn %(name)s(&self, %(params)s) -> %(ret_ty)s {
((*self.vtable).%(name)s)(self, %(args)s)
}
"""
+
def methodAsWrapper(iface, m):
try:
param_list = methodRawParamList(iface, m)
params = ["%s: %s" % x for x in param_list]
args = [x[0] for x in param_list]
return method_impl_tmpl % {
'name': methodNativeName(m),
@@ -230,16 +231,17 @@ infallible_impl_tmpl = """\
pub unsafe fn %(name)s(&self) -> %(realtype)s {
let mut result = <%(realtype)s as ::std::default::Default>::default();
let _rv = ((*self.vtable).%(name)s)(self, &mut result);
debug_assert!(::nserror::NsresultExt::succeeded(_rv));
result
}
"""
+
def attrAsWrapper(iface, m, getter):
try:
if m.implicit_jscontext:
raise xpidl.RustNoncompat("jscontext is unsupported")
if m.nostdcall:
raise xpidl.RustNoncompat("nostdcall is unsupported")
@@ -299,17 +301,17 @@ def print_rust_bindings(idl, fd, filenam
if p.kind == 'typedef':
try:
# We have to skip the typedef of bool to bool (it doesn't make any sense anyways)
if p.name == "bool":
continue
if printdoccomments:
fd.write("/// `typedef %s %s;`\n///\n" %
- (p.realtype.nativeType('in'), p.name))
+ (p.realtype.nativeType('in'), p.name))
fd.write(doccomments(p.doccomments))
fd.write("pub type %s = %s;\n\n" % (p.name, p.realtype.rustType('in')))
except xpidl.RustNoncompat as reason:
fd.write("/* unable to generate %s typedef because `%s` */\n\n" %
(p.name, reason))
base_vtable_tmpl = """
@@ -476,20 +478,20 @@ def write_interface(iface, fd):
names = uuid_decoder.match(iface.attributes.uuid).groupdict()
m3str = names['m3'] + names['m4']
names['m3joined'] = ", ".join(["0x%s" % m3str[i:i+2] for i in xrange(0, 16, 2)])
names['name'] = iface.name
if printdoccomments:
if iface.base is not None:
fd.write("/// `interface %s : %s`\n///\n" %
- (iface.name, iface.base))
+ (iface.name, iface.base))
else:
fd.write("/// `interface %s`\n///\n" %
- iface.name)
+ iface.name)
printComments(fd, iface.doccomments, '')
fd.write(struct_tmpl % names)
if iface.base is not None:
fd.write(deref_tmpl % {
'name': iface.name,
'base': iface.base,
})
--- a/xpcom/idl-parser/xpidl/xpidl.py
+++ b/xpcom/idl-parser/xpidl/xpidl.py
@@ -47,17 +47,18 @@ def rustBlacklistedForward(s):
def attlistToIDL(attlist):
if len(attlist) == 0:
return ''
attlist = list(attlist)
attlist.sort(cmp=lambda a, b: cmp(a[0], b[0]))
return '[%s] ' % ','.join(["%s%s" % (name, value is not None and '(%s)' % value or '')
- for name, value, aloc in attlist])
+ for name, value, aloc in attlist])
+
_paramsHardcode = {
2: ('array', 'shared', 'iid_is', 'size_is', 'retval'),
3: ('array', 'size_is', 'const'),
}
def paramAttlistToIDL(attlist):
@@ -126,17 +127,18 @@ class Builtin(object):
return True
def isPointer(self):
"""Check if this type is a pointer type - this will control how pointers act"""
return self.nativename.endswith('*')
def nativeType(self, calltype, shared=False, const=False):
if const:
- print >>sys.stderr, IDLError("[const] doesn't make sense on builtin types.", self.location, warning=True)
+ print >>sys.stderr, IDLError(
+ "[const] doesn't make sense on builtin types.", self.location, warning=True)
const = 'const '
elif calltype == 'in' and self.isPointer():
const = 'const '
elif shared:
if not self.isPointer():
raise IDLError("[shared] not applicable to non-pointer types.", self.location)
const = 'const '
else:
@@ -149,16 +151,17 @@ class Builtin(object):
# was requested.
const = const or (calltype == 'in' and self.isPointer()) or shared
rustname = self.rustname
if const and self.isPointer():
rustname = self.rustname.replace("*mut", "*const")
return "%s%s" % (calltype != 'in' and '*mut ' or '', rustname)
+
builtinNames = [
Builtin('boolean', 'bool', 'bool'),
Builtin('void', 'void', 'libc::c_void'),
Builtin('octet', 'uint8_t', 'libc::uint8_t'),
Builtin('short', 'int16_t', 'libc::int16_t', True, True),
Builtin('long', 'int32_t', 'libc::int32_t', True, True),
Builtin('long long', 'int64_t', 'libc::int64_t', True, False),
Builtin('unsigned short', 'uint16_t', 'libc::uint16_t', False, True),
@@ -215,57 +218,61 @@ class Location(object):
self.resolve()
return "%s line %s:%s\n%s\n%s" % (self._file, self._lineno, self._colno,
self._line, self.pointerline())
class NameMap(object):
"""Map of name -> object. Each object must have a .name and .location property.
Setting the same name twice throws an error."""
+
def __init__(self):
self._d = {}
def __getitem__(self, key):
if key in builtinMap:
return builtinMap[key]
return self._d[key]
def __iter__(self):
return self._d.itervalues()
def __contains__(self, key):
return key in builtinMap or key in self._d
def set(self, object):
if object.name in builtinMap:
- raise IDLError("name '%s' is a builtin and cannot be redeclared" % (object.name), object.location)
+ raise IDLError("name '%s' is a builtin and cannot be redeclared" %
+ (object.name), object.location)
if object.name.startswith("_"):
object.name = object.name[1:]
if object.name in self._d:
old = self._d[object.name]
if old == object:
return
if isinstance(old, Forward) and isinstance(object, Interface):
self._d[object.name] = object
elif isinstance(old, Interface) and isinstance(object, Forward):
pass
else:
- raise IDLError("name '%s' specified twice. Previous location: %s" % (object.name, self._d[object.name].location), object.location)
+ raise IDLError("name '%s' specified twice. Previous location: %s" %
+ (object.name, self._d[object.name].location), object.location)
else:
self._d[object.name] = object
def get(self, id, location):
try:
return self[id]
except KeyError:
raise IDLError("Name '%s' not found", location)
class RustNoncompat(Exception):
"""Thie exception is raised when a particular type or function cannot be safely exposed to rust code"""
+
def __init__(self, reason):
self.reason = reason
def __str__(self):
return self.reason
class IDLError(Exception):
@@ -654,34 +661,38 @@ class Interface(object):
break
self.doccomments = parent.getName(self.name, None).doccomments
if self.attributes.function:
has_method = False
for member in self.members:
if member.kind is 'method':
if has_method:
- raise IDLError("interface '%s' has multiple methods, but marked 'function'" % self.name, self.location)
+ raise IDLError(
+ "interface '%s' has multiple methods, but marked 'function'" % self.name, self.location)
else:
has_method = True
parent.setName(self)
if self.base is not None:
realbase = parent.getName(self.base, self.location)
if realbase.kind != 'interface':
- raise IDLError("interface '%s' inherits from non-interface type '%s'" % (self.name, self.base), self.location)
+ raise IDLError("interface '%s' inherits from non-interface type '%s'" %
+ (self.name, self.base), self.location)
if self.attributes.scriptable and not realbase.attributes.scriptable:
- raise IDLError("interface '%s' is scriptable but derives from non-scriptable '%s'" % (self.name, self.base), self.location, warning=True)
+ raise IDLError("interface '%s' is scriptable but derives from non-scriptable '%s'" %
+ (self.name, self.base), self.location, warning=True)
if self.attributes.scriptable and realbase.attributes.builtinclass and not self.attributes.builtinclass:
- raise IDLError("interface '%s' is not builtinclass but derives from builtinclass '%s'" % (self.name, self.base), self.location)
+ raise IDLError("interface '%s' is not builtinclass but derives from builtinclass '%s'" % (
+ self.name, self.base), self.location)
if realbase.implicit_builtinclass:
- self.implicit_builtinclass = True # Inherit implicit builtinclass from base
+ self.implicit_builtinclass = True # Inherit implicit builtinclass from base
for member in self.members:
member.resolve(self)
# The number 250 is NOT arbitrary; this number is the maximum number of
# stub entries defined in xpcom/reflect/xptcall/genstubs.pl
# Do not increase this value without increasing the number in that
# location, or you WILL cause otherwise unknown problems!
@@ -784,17 +795,17 @@ class InterfaceAttributes(object):
'scriptable': (False, setscriptable),
'builtinclass': (False, setbuiltinclass),
'function': (False, setfunction),
'noscript': (False, setnoscript),
'object': (False, lambda self: True),
'main_process_scriptable_only': (False, setmain_process_scriptable_only),
'shim': (True, setshim),
'shimfile': (True, setshimfile),
- }
+ }
def __init__(self, attlist, location):
def badattribute(self):
raise IDLError("Unexpected interface attribute '%s'" % name, location)
for name, val, aloc in attlist:
hasval, action = self.actions.get(name, (False, badattribute))
if hasval:
@@ -844,17 +855,18 @@ class ConstMember(object):
def resolve(self, parent):
self.realtype = parent.idl.getName(self.type, self.location)
self.iface = parent
basetype = self.realtype
while isinstance(basetype, Typedef):
basetype = basetype.realtype
if not isinstance(basetype, Builtin) or not basetype.maybeConst:
- raise IDLError("const may only be a short or long type, not %s" % self.type, self.location)
+ raise IDLError("const may only be a short or long type, not %s" %
+ self.type, self.location)
self.basetype = basetype
def getValue(self):
return self.value(self.iface)
def __str__(self):
return "\tconst %s %s = %s\n" % (self.type, self.name, self.getValue())
@@ -1019,26 +1031,29 @@ class Method(object):
def resolve(self, iface):
self.iface = iface
self.realtype = self.iface.idl.getName(self.type, self.location)
for p in self.params:
p.resolve(self)
for p in self.params:
if p.retval and p != self.params[-1]:
- raise IDLError("'retval' parameter '%s' is not the last parameter" % p.name, self.location)
+ raise IDLError("'retval' parameter '%s' is not the last parameter" %
+ p.name, self.location)
if p.size_is:
found_size_param = False
for size_param in self.params:
if p.size_is == size_param.name:
found_size_param = True
if getBuiltinOrNativeTypeName(size_param.realtype) != 'unsigned long':
- raise IDLError("is_size parameter must have type 'unsigned long'", self.location)
+ raise IDLError(
+ "is_size parameter must have type 'unsigned long'", self.location)
if not found_size_param:
- raise IDLError("could not find is_size parameter '%s'" % p.size_is, self.location)
+ raise IDLError("could not find is_size parameter '%s'" %
+ p.size_is, self.location)
def isScriptable(self):
if not self.iface.attributes.scriptable:
return False
return not (self.noscript or self.notxpcom)
def __str__(self):
return "\t%s %s(%s)\n" % (self.type, self.name, ", ".join([p.name for p in self.params]))
@@ -1209,29 +1224,29 @@ class IDLParser(object):
'inout': 'INOUT',
'out': 'OUT',
'attribute': 'ATTRIBUTE',
'raises': 'RAISES',
'readonly': 'READONLY',
'native': 'NATIVE',
'typedef': 'TYPEDEF',
'webidl': 'WEBIDL',
- }
+ }
tokens = [
'IDENTIFIER',
'CDATA',
'INCLUDE',
'IID',
'NUMBER',
'HEXNUM',
'LSHIFT',
'RSHIFT',
'NATIVEID',
- ]
+ ]
tokens.extend(keywords.values())
states = (
('nativeid', 'exclusive'),
)
hexchar = r'[a-fA-F0-9]'
@@ -1396,17 +1411,17 @@ class IDLParser(object):
"""interface : attributes INTERFACE IDENTIFIER ifacebase ifacebody ';'"""
atts, INTERFACE, name, base, body, SEMI = p[1:]
attlist = atts['attlist']
doccomments = []
if 'doccomments' in atts:
doccomments.extend(atts['doccomments'])
doccomments.extend(p.slice[2].doccomments)
- l = lambda: self.getLocation(p, 2)
+ def l(): return self.getLocation(p, 2)
if body is None:
# forward-declared interface... must not have attributes!
if len(attlist) != 0:
raise IDLError("Forward-declared interface must not have attributes",
list[0][3])
if base is not None:
@@ -1593,17 +1608,18 @@ class IDLParser(object):
def p_idlist_continue(self, p):
"""idlist : IDENTIFIER ',' idlist"""
p[0] = list(p[3])
p[0].insert(0, p[1])
def p_error(self, t):
if not t:
- raise IDLError("Syntax Error at end of file. Possibly due to missing semicolon(;), braces(}) or both", None)
+ raise IDLError(
+ "Syntax Error at end of file. Possibly due to missing semicolon(;), braces(}) or both", None)
else:
location = Location(self.lexer, t.lineno, t.lexpos)
raise IDLError("invalid syntax", location)
def __init__(self, outputdir=''):
self._doccomments = []
self.lexer = lex.lex(object=self,
outputdir=outputdir,
@@ -1633,13 +1649,14 @@ class IDLParser(object):
idl = self.parser.parse(lexer=self)
if filename is not None:
idl.deps.append(filename)
return idl
def getLocation(self, p, i):
return Location(self.lexer, p.lineno(i), p.lexpos(i))
+
if __name__ == '__main__':
p = IDLParser()
for f in sys.argv[1:]:
print "Parsing %s" % f
p.parse(open(f).read(), filename=f)
--- a/xpcom/reflect/xptinfo/perfecthash.py
+++ b/xpcom/reflect/xptinfo/perfecthash.py
@@ -27,26 +27,30 @@ FNV_PRIME = 16777619
# which we sometimes use as a flag.
U32_HIGH_BIT = 0x80000000
# A basic FNV-based hash function. bytes is the bytearray to hash. 32-bit FNV is
# used for indexing into the first table, and the value stored in that table is
# used as the offset basis for indexing into the values table.
#
# NOTE: C++ implementation is in xptinfo.cpp
+
+
def hash(bytes, h=FNV_OFFSET_BASIS):
for byte in bytes:
h ^= byte # xor-in the byte
h *= FNV_PRIME # Multiply by the FNV prime
- h &= 0xffffffff # clamp to 32-bits
+ h &= 0xffffffff # clamp to 32-bits
return h
+
IntermediateBucket = namedtuple('IntermediateBucket', ['index', 'entries'])
HashEntry = namedtuple('HashEntry', ['key', 'value'])
+
class PerfectHash(object):
"""An object representing a perfect hash function"""
def __init__(self, intermediate_table_size, data):
# data should be a list of (bytearray, value) pairs
self.intermediate = [0] * intermediate_table_size
self.values = [None] * len(data)
--- a/xpcom/reflect/xptinfo/xptcodegen.py
+++ b/xpcom/reflect/xptinfo/xptcodegen.py
@@ -13,34 +13,38 @@ from perfecthash import PerfectHash
import time
from collections import OrderedDict
# We fix the number of entries in our intermediate table used by the perfect
# hashes to 512. This number is constant in xptinfo, allowing the compiler to
# generate a more efficient modulo due to it being a power of 2.
PHFSIZE = 512
+
def indented(s):
return s.replace('\n', '\n ')
+
def cpp(v):
if type(v) == bool:
return "true" if v else "false"
return str(v)
+
def mkstruct(*fields):
def mk(comment, **vals):
assert len(fields) == len(vals)
r = "{ // " + comment
r += indented(','.join(
"\n/* %s */ %s" % (k, cpp(vals[k])) for k in fields))
r += "\n}"
return r
return mk
+
##########################################################
# Ensure these fields are in the same order as xptinfo.h #
##########################################################
nsXPTInterfaceInfo = mkstruct(
"mIID",
"mName",
"mParent",
"mBuiltinClass",
@@ -124,61 +128,65 @@ ConstInfo = mkstruct(
def split_at_idxs(s, lengths):
idx = 0
for length in lengths:
yield s[idx:idx+length]
idx += length
assert idx == len(s)
-def split_iid(iid): # Get the individual components out of an IID string.
- iid = iid.replace('-', '') # Strip any '-' delimiters
+
+def split_iid(iid): # Get the individual components out of an IID string.
+ iid = iid.replace('-', '') # Strip any '-' delimiters
return tuple(split_at_idxs(iid, (8, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2)))
-def iid_bytes(iid): # Get the byte representation of the IID for hashing.
+
+def iid_bytes(iid): # Get the byte representation of the IID for hashing.
bs = bytearray()
for num in split_iid(iid):
b = bytearray.fromhex(num)
# We store the bytes in little-endian. On big-endian systems, the C++
# code will flip the bytes to little-endian before hashing in order to
# keep the tables consistent.
b.reverse()
bs += b
return bs
# Split a 16-bit integer into its high and low 8 bits
+
+
def splitint(i):
assert i < 2**16
return (i >> 8, i & 0xff)
# Occasionally in xpconnect, we need to fabricate types to pass into the
# conversion methods. In some cases, these types need to be arrays, which hold
# indicies into the extra types array.
#
# These are some types which should have known indexes into the extra types
# array.
utility_types = [
- { 'tag': 'TD_INT8' },
- { 'tag': 'TD_UINT8' },
- { 'tag': 'TD_INT16' },
- { 'tag': 'TD_UINT16' },
- { 'tag': 'TD_INT32' },
- { 'tag': 'TD_UINT32' },
- { 'tag': 'TD_INT64' },
- { 'tag': 'TD_UINT64' },
- { 'tag': 'TD_FLOAT' },
- { 'tag': 'TD_DOUBLE' },
- { 'tag': 'TD_BOOL' },
- { 'tag': 'TD_CHAR' },
- { 'tag': 'TD_WCHAR' },
- { 'tag': 'TD_PNSIID' },
- { 'tag': 'TD_PSTRING' },
- { 'tag': 'TD_PWSTRING' },
- { 'tag': 'TD_INTERFACE_IS_TYPE', 'iid_is': 0 },
+ {'tag': 'TD_INT8'},
+ {'tag': 'TD_UINT8'},
+ {'tag': 'TD_INT16'},
+ {'tag': 'TD_UINT16'},
+ {'tag': 'TD_INT32'},
+ {'tag': 'TD_UINT32'},
+ {'tag': 'TD_INT64'},
+ {'tag': 'TD_UINT64'},
+ {'tag': 'TD_FLOAT'},
+ {'tag': 'TD_DOUBLE'},
+ {'tag': 'TD_BOOL'},
+ {'tag': 'TD_CHAR'},
+ {'tag': 'TD_WCHAR'},
+ {'tag': 'TD_PNSIID'},
+ {'tag': 'TD_PSTRING'},
+ {'tag': 'TD_PWSTRING'},
+ {'tag': 'TD_INTERFACE_IS_TYPE', 'iid_is': 0},
]
# Core of the code generator. Takes a list of raw JSON XPT interfaces, and
# writes out a file containing the necessary static declarations into fd.
def link_to_cpp(interfaces, fd):
# Perfect Hash from IID into the ifaces array.
iid_phf = PerfectHash(PHFSIZE, [
@@ -190,17 +198,17 @@ def link_to_cpp(interfaces, fd):
(bytearray(iface['name'], 'ascii'), idx)
for idx, iface in enumerate(iid_phf.values)
])
def interface_idx(name):
if name is not None:
idx = name_phf.lookup(bytearray(name, 'ascii'))
if iid_phf.values[idx]['name'] == name:
- return idx + 1 # One-based, so we can use 0 as a sentinel.
+ return idx + 1 # One-based, so we can use 0 as a sentinel.
return 0
# NOTE: State used while linking. This is done with closures rather than a
# class due to how this file's code evolved.
includes = set()
types = []
type_cache = {}
ifaces = []
@@ -223,17 +231,17 @@ def link_to_cpp(interfaces, fd):
if idx is None:
idx = domobject_cache[do['name']] = len(domobjects)
includes.add(do['headerFile'])
domobjects.append(nsXPTDOMObjectInfo(
"%d = %s" % (idx, do['name']),
# These methods are defined at the top of the generated file.
mUnwrap="UnwrapDOMObject<mozilla::dom::prototypes::id::%s, %s>" %
- (do['name'], do['native']),
+ (do['name'], do['native']),
mWrap="WrapDOMObject<%s>" % do['native'],
mCleanup="CleanupDOMObject<%s>" % do['native'],
))
return idx
def lower_string(s):
if s in strings:
@@ -250,17 +258,17 @@ def link_to_cpp(interfaces, fd):
def lower_extra_type(type):
key = describe_type(type)
idx = type_cache.get(key)
if idx is None:
idx = type_cache[key] = len(types)
types.append(lower_type(type))
return idx
- def describe_type(type): # Create the type's documentation comment.
+ def describe_type(type): # Create the type's documentation comment.
tag = type['tag'][3:].lower()
if tag == 'array':
return '%s[size_is=%d]' % (
describe_type(type['element']), type['size_is'])
elif tag == 'interface_type' or tag == 'domobject':
return type['name']
elif tag == 'interface_is_type':
return 'iid_is(%d)' % type['iid_is']
@@ -307,17 +315,17 @@ def link_to_cpp(interfaces, fd):
out='out' in param['flags'],
optional='optional' in param['flags'])
))
def lower_method(method, ifacename):
methodname = "%s::%s" % (ifacename, method['name'])
if 'notxpcom' in method['flags'] or 'hidden' in method['flags']:
- paramidx = name = numparams = 0 # hide parameters
+ paramidx = name = numparams = 0 # hide parameters
else:
name = lower_string(method['name'])
numparams = len(method['params'])
# Check cache for parameters
cachekey = json.dumps(method['params'])
paramidx = param_cache.get(cachekey)
if paramidx is None:
@@ -356,27 +364,27 @@ def link_to_cpp(interfaces, fd):
consts.append(ConstInfo(
"%d = %s::%s" % (len(consts), ifacename, const['name']),
mName=lower_string(const['name']),
mSigned=is_signed,
mValue="(uint32_t)%d" % const['value'],
))
- def lower_prop_hooks(iface): # XXX: Used by xpt shims
+ def lower_prop_hooks(iface): # XXX: Used by xpt shims
assert iface['shim'] is not None
# Add an include for the Binding file for the shim.
includes.add("mozilla/dom/%sBinding.h" %
- (iface['shimfile'] or iface['shim']))
+ (iface['shimfile'] or iface['shim']))
# Add the property hook reference to the sPropHooks table.
prophooks.append(
- "mozilla::dom::%sBinding::sNativePropertyHooks, // %d = %s(%s)" % \
- (iface['shim'], len(prophooks), iface['name'], iface['shim']))
+ "mozilla::dom::%sBinding::sNativePropertyHooks, // %d = %s(%s)" %
+ (iface['shim'], len(prophooks), iface['name'], iface['shim']))
def collect_base_info(iface):
methods = 0
consts = 0
while iface is not None:
methods += len(iface['methods'])
consts += len(iface['consts'])
idx = interface_idx(iface['parent'])
@@ -488,17 +496,17 @@ static void CleanupDOMObject(void* aObj)
namespace xpt {
namespace detail {
""")
# Static data arrays
def array(ty, name, els):
fd.write("const %s %s[] = {%s\n};\n\n" %
- (ty, name, ','.join(indented('\n' + str(e)) for e in els)))
+ (ty, name, ','.join(indented('\n' + str(e)) for e in els)))
array("nsXPTInterfaceInfo", "sInterfaces", ifaces)
array("nsXPTType", "sTypes", types)
array("nsXPTParamInfo", "sParams", params)
array("nsXPTMethodInfo", "sMethods", methods)
array("nsXPTDOMObjectInfo", "sDOMObjects", domobjects)
array("ConstInfo", "sConsts", consts)
array("mozilla::dom::NativePropertyHooks*", "sPropHooks", prophooks)
@@ -567,10 +575,11 @@ def main():
parser = ArgumentParser()
parser.add_argument('outfile', help='Output C++ file to generate')
parser.add_argument('xpts', nargs='*', help='source xpt files')
args = parser.parse_args(sys.argv[1:])
with open(args.outfile, 'w') as fd:
link_and_write(args.xpts, fd)
+
if __name__ == '__main__':
main()
--- a/xpcom/typelib/xpt/tools/runtests.py
+++ b/xpcom/typelib/xpt/tools/runtests.py
@@ -40,16 +40,17 @@ import unittest
import xpt
def get_output(bin, file):
p = subprocess.Popen([bin, file], stdout=subprocess.PIPE)
stdout, _ = p.communicate()
return stdout
+
if "MOZILLA_OBJDIR" in os.environ:
class CheckXPTDump(unittest.TestCase):
def test_xpt_dump_diffs(self):
MOZILLA_OBJDIR = os.environ["MOZILLA_OBJDIR"]
xptdump = os.path.abspath(os.path.join(MOZILLA_OBJDIR,
"dist", "bin", "xpt_dump"))
components = os.path.abspath(os.path.join(MOZILLA_OBJDIR,
"dist", "bin", "components"))
@@ -265,66 +266,69 @@ class TestTypelibRoundtrip(unittest.Test
i = xpt.Interface("IFoo", iid="11223344-5566-7788-9900-aabbccddeeff",
methods=[m])
t = xpt.Typelib(interfaces=[i])
self.checkRoundtrip(t)
# add some more methods
i.methods.append(xpt.Method("One", xpt.Param(xpt.SimpleType(xpt.Type.Tags.int32)),
params=[
xpt.Param(xpt.SimpleType(xpt.Type.Tags.int64)),
- xpt.Param(xpt.SimpleType(xpt.Type.Tags.float, pointer=True))
- ]))
+ xpt.Param(xpt.SimpleType(
+ xpt.Type.Tags.float, pointer=True))
+ ]))
self.checkRoundtrip(t)
# test some other types (should really be more thorough)
i.methods.append(xpt.Method("Two", xpt.Param(xpt.SimpleType(xpt.Type.Tags.int32)),
params=[
- xpt.Param(xpt.SimpleType(xpt.Type.Tags.UTF8String, pointer=True)),
- xpt.Param(xpt.SimpleType(xpt.Type.Tags.wchar_t_ptr, pointer=True))
- ]))
+ xpt.Param(xpt.SimpleType(
+ xpt.Type.Tags.UTF8String, pointer=True)),
+ xpt.Param(xpt.SimpleType(
+ xpt.Type.Tags.wchar_t_ptr, pointer=True))
+ ]))
self.checkRoundtrip(t)
# add a method with an InterfaceType argument
bar = xpt.Interface("IBar")
t.interfaces.append(bar)
i.methods.append(xpt.Method("IFaceMethod", xpt.Param(xpt.SimpleType(xpt.Type.Tags.int32)),
params=[
xpt.Param(xpt.InterfaceType(bar))
- ]))
+ ]))
self.checkRoundtrip(t)
# add a method with an InterfaceIsType argument
i.methods.append(xpt.Method("IFaceIsMethod", xpt.Param(xpt.SimpleType(xpt.Type.Tags.void)),
params=[
xpt.Param(xpt.InterfaceIsType(1)),
xpt.Param(xpt.SimpleType(xpt.Type.Tags.nsIID))
- ]))
+ ]))
self.checkRoundtrip(t)
# add a method with an ArrayType argument
i.methods.append(xpt.Method("ArrayMethod", xpt.Param(xpt.SimpleType(xpt.Type.Tags.void)),
params=[
xpt.Param(xpt.ArrayType(
xpt.SimpleType(xpt.Type.Tags.int32),
1, 2)),
xpt.Param(xpt.SimpleType(xpt.Type.Tags.int32)),
xpt.Param(xpt.SimpleType(xpt.Type.Tags.int32)),
- ]))
+ ]))
self.checkRoundtrip(t)
# add a method with a StringWithSize and WideStringWithSize arguments
i.methods.append(xpt.Method("StringWithSizeMethod", xpt.Param(xpt.SimpleType(xpt.Type.Tags.void)),
params=[
xpt.Param(xpt.StringWithSizeType(
1, 2)),
xpt.Param(xpt.SimpleType(xpt.Type.Tags.int32)),
xpt.Param(xpt.SimpleType(xpt.Type.Tags.int32)),
xpt.Param(xpt.WideStringWithSizeType(
4, 5)),
xpt.Param(xpt.SimpleType(xpt.Type.Tags.int32)),
xpt.Param(xpt.SimpleType(xpt.Type.Tags.int32)),
- ]))
+ ]))
self.checkRoundtrip(t)
class TestInterfaceCmp(unittest.TestCase):
def test_unresolvedName(self):
"""
Test comparison function on xpt.Interface by name.
@@ -401,20 +405,22 @@ class TestXPTLink(unittest.TestCase):
self.assertEqual(2, len(t3.interfaces))
# Interfaces should wind up sorted
self.assertEqual("IBar", t3.interfaces[0].name)
self.assertEqual("IFoo", t3.interfaces[1].name)
# Add some IID values
t1 = xpt.Typelib()
# add an unresolved interface
- t1.interfaces.append(xpt.Interface("IFoo", iid="11223344-5566-7788-9900-aabbccddeeff", scriptable=True))
+ t1.interfaces.append(xpt.Interface(
+ "IFoo", iid="11223344-5566-7788-9900-aabbccddeeff", scriptable=True))
t2 = xpt.Typelib()
# add an unresolved interface
- t2.interfaces.append(xpt.Interface("IBar", iid="44332211-6655-8877-0099-aabbccddeeff", scriptable=True))
+ t2.interfaces.append(xpt.Interface(
+ "IBar", iid="44332211-6655-8877-0099-aabbccddeeff", scriptable=True))
t3 = xpt.xpt_link([t1, t2])
self.assertEqual(2, len(t3.interfaces))
# Interfaces should wind up sorted
self.assertEqual("IFoo", t3.interfaces[0].name)
self.assertEqual("IBar", t3.interfaces[1].name)
def test_mergeConflict(self):
@@ -446,32 +452,34 @@ class TestXPTLink(unittest.TestCase):
Test that merging a typelib with an unresolved definition of
an interface that's also unresolved in this typelib, but one
has a valid IID copies the IID value to the resulting typelib.
"""
# Unresolved in both, but t1 has an IID value
t1 = xpt.Typelib()
# add an unresolved interface with a valid IID
- t1.interfaces.append(xpt.Interface("IFoo", iid="11223344-5566-7788-9900-aabbccddeeff", scriptable=True))
+ t1.interfaces.append(xpt.Interface(
+ "IFoo", iid="11223344-5566-7788-9900-aabbccddeeff", scriptable=True))
t2 = xpt.Typelib()
# add an unresolved interface, no IID
t2.interfaces.append(xpt.Interface("IFoo"))
t3 = xpt.xpt_link([t1, t2])
self.assertEqual(1, len(t3.interfaces))
self.assertEqual("IFoo", t3.interfaces[0].name)
self.assertEqual("11223344-5566-7788-9900-aabbccddeeff", t3.interfaces[0].iid)
# Unresolved in both, but t2 has an IID value
t1 = xpt.Typelib()
# add an unresolved interface, no IID
t1.interfaces.append(xpt.Interface("IFoo"))
t2 = xpt.Typelib()
# add an unresolved interface with a valid IID
- t2.interfaces.append(xpt.Interface("IFoo", iid="11223344-5566-7788-9900-aabbccddeeff", scriptable=True))
+ t2.interfaces.append(xpt.Interface(
+ "IFoo", iid="11223344-5566-7788-9900-aabbccddeeff", scriptable=True))
t3 = xpt.xpt_link([t1, t2])
self.assertEqual(1, len(t3.interfaces))
self.assertEqual("IFoo", t3.interfaces[0].name)
self.assertEqual("11223344-5566-7788-9900-aabbccddeeff", t3.interfaces[0].iid)
def test_mergeResolvedUnresolved(self):
"""
@@ -761,10 +769,11 @@ class TestXPTLink(unittest.TestCase):
self.assertEqual("11223344-5566-7788-9900-aabbccddeeff", t3.interfaces[1].iid)
self.assert_(t3.interfaces[1].resolved)
# Ensure that IRetval's method's param type has been updated.
self.assertEqual(1, len(t3.interfaces[0].methods))
self.assert_(t3.interfaces[0].methods[0].params[0].type.element_type.iface.resolved)
self.assertEqual(t3.interfaces[1],
t3.interfaces[0].methods[0].params[0].type.element_type.iface)
+
if __name__ == '__main__':
mozunit.main()
--- a/xpcom/typelib/xpt/tools/xpt.py
+++ b/xpcom/typelib/xpt/tools/xpt.py
@@ -291,17 +291,17 @@ class Type(object):
# WideStringWithSizeTypeDescriptor
'WideStringWithSize',
# XXX: These are also SimpleTypes (but not in the spec)
# https://hg.mozilla.org/mozilla-central/annotate/0e0e2516f04e/xpcom/typelib/xpt/tools/xpt_dump.c#l69
'UTF8String',
'CString',
'AString',
'jsval',
- )
+ )
def __init__(self, pointer=False, reference=False):
self.pointer = pointer
self.reference = reference
if reference and not pointer:
raise Exception("If reference is True pointer must be True too")
def __cmp__(self, other):
@@ -752,16 +752,17 @@ class WideStringWithSizeType(Type):
return "wstring_s"
class CachedStringWriter(object):
"""
A cache that sits in front of a file to avoid adding the same
string multiple times.
"""
+
def __init__(self, file, data_pool_offset):
self.file = file
self.data_pool_offset = data_pool_offset
self.names = {}
def write(self, s):
if s:
if s in self.names:
@@ -1070,16 +1071,17 @@ class Method(object):
param_index = cd.add_params([p.code_gen(typelib, cd) for p in self.params])
num_params = len(self.params)
return "{%d, %d, 0x%x, %d}" % (string_index,
param_index,
self.encodeflags(),
num_params)
+
class Constant(object):
"""
A constant value of a specific type defined on an interface.
(ConstantDescriptor from the typelib specification.)
"""
_descriptorstart = struct.Struct(">I")
# Actual value is restricted to this set of types
@@ -1191,17 +1193,18 @@ class Interface(object):
self.function = function
self.builtinclass = builtinclass
self.main_process_scriptable_only = main_process_scriptable_only
# For sanity, if someone constructs an Interface and passes
# in methods or constants, then it's resolved.
if self.methods or self.constants:
# make sure it has a valid IID
if self.iid == Interface.UNRESOLVED_IID:
- raise DataError("Cannot instantiate Interface %s containing methods or constants with an unresolved IID" % self.name)
+ raise DataError(
+ "Cannot instantiate Interface %s containing methods or constants with an unresolved IID" % self.name)
self.resolved = True
# These are only used for writing out the interface
self._descriptor_offset = 0
self._name_offset = 0
self._namespace_offset = 0
self.xpt_filename = None
def __repr__(self):
@@ -1463,17 +1466,18 @@ class Typelib(object):
file_length,
interface_directory_offset,
data_pool_offset) = Typelib._header.unpack_from(data)
if magic != XPT_MAGIC:
raise FileFormatError("Bad magic: %s" % magic)
xpt = Typelib((major_ver, minor_ver))
xpt.filename = filename
if expected_size and file_length != expected_size:
- raise FileFormatError("File is of wrong length, got %d bytes, expected %d" % (expected_size, file_length))
+ raise FileFormatError(
+ "File is of wrong length, got %d bytes, expected %d" % (expected_size, file_length))
# XXX: by spec this is a zero-based file offset. however,
# the xpt_xdr code always subtracts 1 from data offsets
# (because that's what you do in the data pool) so it
# winds up accidentally treating this as 1-based.
# Filed as: https://bugzilla.mozilla.org/show_bug.cgi?id=575343
interface_directory_offset -= 1
# make a half-hearted attempt to read Annotations,
# since XPIDL doesn't produce any anyway.
@@ -1517,24 +1521,27 @@ class Typelib(object):
Check certain assumptions about data contained in this typelib.
Sort the interfaces array by IID, check that all interfaces
referenced by methods exist in the array.
"""
self.interfaces.sort()
for i in self.interfaces:
if i.parent and i.parent not in self.interfaces:
- raise DataError("Interface %s has parent %s not present in typelib!" % (i.name, i.parent.name))
+ raise DataError("Interface %s has parent %s not present in typelib!" %
+ (i.name, i.parent.name))
for m in i.methods:
for n, p in enumerate(m.params):
if isinstance(p, InterfaceType) and \
p.iface not in self.interfaces:
- raise DataError("Interface method %s::%s, parameter %d references interface %s not present in typelib!" % (i.name, m.name, n, p.iface.name))
+ raise DataError("Interface method %s::%s, parameter %d references interface %s not present in typelib!" % (
+ i.name, m.name, n, p.iface.name))
if isinstance(m.result, InterfaceType) and m.result.iface not in self.interfaces:
- raise DataError("Interface method %s::%s, result references interface %s not present in typelib!" % (i.name, m.name, m.result.iface.name))
+ raise DataError("Interface method %s::%s, result references interface %s not present in typelib!" % (
+ i.name, m.name, m.result.iface.name))
def writefd(self, fd):
# write out space for a header + one empty annotation,
# padded to 4-byte alignment.
headersize = (Typelib._header.size + 1)
if headersize % 4:
headersize += 4 - headersize % 4
fd.write("\x00" * headersize)
@@ -1656,17 +1663,17 @@ class Typelib(object):
m.hidden and "H" or " ",
m.notxpcom and "N" or " ",
m.constructor and "C" or " ",
m.optargc and "O" or " ",
m.implicit_jscontext and "J" or " ",
str(m.result.type),
m.name,
m.params and ", ".join(str(p) for p in m.params) or ""
- ))
+ ))
out.write(" Constants:\n")
if len(i.constants) == 0:
out.write(" No Constants\n")
else:
for c in i.constants:
out.write(" %s %s = %d;\n" % (c.type, c.name, c.value))
@@ -1781,17 +1788,17 @@ def xpt_link(inputs):
# Now fixup any merged interfaces
def checkType(t):
if isinstance(t, InterfaceType) and t.iface in merged_interfaces:
t.iface = merged_interfaces[t.iface]
elif isinstance(t, ArrayType) and \
isinstance(t.element_type, InterfaceType) and \
t.element_type.iface in merged_interfaces:
- t.element_type.iface = merged_interfaces[t.element_type.iface]
+ t.element_type.iface = merged_interfaces[t.element_type.iface]
for i in interfaces:
# Replace parent references
if i.parent in merged_interfaces:
i.parent = merged_interfaces[i.parent]
for m in i.methods:
# Replace InterfaceType params and return values
checkType(m.result.type)
@@ -1826,16 +1833,17 @@ def xpt_link(inputs):
maybe_add_to_worklist(p.type.element_type.iface)
interfaces = list(required_interfaces)
# Re-sort interfaces (by IID)
interfaces.sort()
return Typelib(interfaces=interfaces)
+
if __name__ == '__main__':
if len(sys.argv) < 3:
print >>sys.stderr, "xpt <dump|link|linkgen> <files>"
sys.exit(1)
if sys.argv[1] == 'dump':
xpt_dump(sys.argv[2])
elif sys.argv[1] == 'link':
xpt_link(sys.argv[3:]).write(sys.argv[2])