--- a/third_party/rust/bindgen/.cargo-checksum.json
+++ b/third_party/rust/bindgen/.cargo-checksum.json
@@ -1,1 +1,1 @@
-{"files":{"Cargo.toml":"d2d15d8518f87e50143184b13e14139095b0a6aa627d4d5d3a2d12893e765f50","build.rs":"032a1c51963894a421b0535f9227796d88768ac5f665a81d2edced69dc6d106a","src/callbacks.rs":"49f382ebdb94c8f0aba5496f5283ae3cdcf6666a5c41c604e6356e674f9a072b","src/clang.rs":"33d94fa699f052b52a2804b85f49fa5de3a2404ae05801ef9eae16016bc841f1","src/codegen/bitfield_unit.rs":"bd1a19701f1766d0bae3bcb97d7c3cb3881d4b182c56b8f4dfd24b7cc87b5338","src/codegen/bitfield_unit_tests.rs":"2073ac6a36e0bc9afaef5b1207966817c8fb7a1a9f6368c3b1b8f79822efbfba","src/codegen/error.rs":"2613af1d833377fd4a70719f4a09951d9d45dc9227827b9a2a938a1bcaaea2dd","src/codegen/helpers.rs":"bcb951f320fd0948e341d8eabdb58567296d77bf1ae5040c05d967e6435a15d5","src/codegen/impl_debug.rs":"e2ffd5b6ed936698aa4b9e7e3459d353383792707ad51f829a18a822f69cab0e","src/codegen/impl_partialeq.rs":"e86050b98f57fa4496dbde0beea319a89e46290309d274f626361779549b95bd","src/codegen/mod.rs":"74e818d857fb5147f623e9287632d23c4bd0cf87de840034809992b74ff88bdc","src/codegen/struct_layout.rs":"698ed7340242d1cbedd38d400c7c83c32e2fa043c35dbc9a2eea4022251ffdb0","src/extra_assertions.rs":"449549c4a7a50c3f0b06332452b2fb6c9b23f31ca8e5e1656fe6c7f21e8ef7fa","src/features.rs":"8c2148a6f922ca9cb0de2dd3ad77c4dd5734c4c219a5bea9d6b22c4367acb187","src/ir/analysis/derive_copy.rs":"14b53c53b337be00d59424371c07c6712d7cfc9a6735f9d913cda043fddce797","src/ir/analysis/derive_debug.rs":"1d6621c0fa5d899310cc175cb99703606ed34fd7f7ad77bb60f012f25ba504af","src/ir/analysis/derive_default.rs":"4fac04fc3019562cd213586680ecdcf8a3b3544ca3a5c5117f68e5c26e7ee0d9","src/ir/analysis/derive_hash.rs":"a50e849b4388115264c2d6afef5ab07e309d2469f4c3342fb683c799451e9e19","src/ir/analysis/derive_partialeq_or_partialord.rs":"46611c7f3caa0fe78243187742c4a36003dbc266de4c4390642e136bb889c43f","src/ir/analysis/has_destructor.rs":"d9aaaceba580b48eb0df4e5537b34b417c51ccdfeb8f6b72484f3bf4992317fe","src/ir/analysis/has_float.rs":"2a0465503d2c8247eaf916bd6a03594f3dc0370533d9a7c58cc5afb86693816
c","src/ir/analysis/has_type_param_in_array.rs":"fcb1c78b6000f1f5eb8d8147e2afdaba9eb0e3a81b61e72537048dfdbeea7bcd","src/ir/analysis/has_vtable.rs":"37765e954ef792e369a58ccfe1d827a00fe9bce680466da1d6523671b94b6c92","src/ir/analysis/mod.rs":"ea5ace45c77e855674bb565ba0fef556f60e3293b0ddcf11d3a5a6ec15ab0648","src/ir/analysis/sizedness.rs":"3d3c8bde40604d53bb64273a3cbd8c55936a7dfe1de9b2ba92fc2c45572624b4","src/ir/analysis/template_params.rs":"5c6ee7a251a321ef5733e2e7ac3264621b4181268babcc008b69dbfc37691fb1","src/ir/annotations.rs":"ef106afcbe6084c18bd13a37ee3c1cdc9596bfb055db8c773d81f8f15fec3208","src/ir/comment.rs":"36f2a1d3970fdbf3d72c1f094043902747cde395215bdf7e9103926d9df011fd","src/ir/comp.rs":"b24e6ab76235b46bb5a262e3a3f2fc685c26bd7482aea113231a942eeffd0cf5","src/ir/context.rs":"ccd99c814f1a8ba1f3a30667a8d90db13a50adc26620be08f907283d1c5f08a3","src/ir/derive.rs":"1fd6ad621e3c60b950acbd51fbe386d1f0fadb7c1889c723245afff45e42e143","src/ir/dot.rs":"d01f1621ab67e368d854a82bd6bb0b8dd52f3c2c733de8eaf81aece9543818cb","src/ir/enum_ty.rs":"98a4aa58e598b31e4dc2c052c90029f37b53b5c5cfcbd216d4b0e8c73454813f","src/ir/function.rs":"cce97e7cd6ffb7d5ae40e6be1b82e7ae899d9ea9cf7c8f97380486ac7cc120e6","src/ir/int.rs":"1f61a472288afe489d9320bc8b13920333ece57891ae8570b4c4f25ab50688e6","src/ir/item.rs":"03477f4f4abfebc92ce4c9a202e33c86615dfffe3e6d1e0a201d2e85eecb9917","src/ir/item_kind.rs":"dbeae8c4fd0e5c9485d325aea040e056a1f2cd6d43fc927dee8fe1c0c59a7197","src/ir/layout.rs":"e3d1adf1ad2fa5bd96530cdd5097db3d9cc7b44d33ec23a04fcfccecd9cf4469","src/ir/mod.rs":"2eae90f207fad2e45957ec9287064992a419e3fc916aba84faff2ea25cbeb5ee","src/ir/module.rs":"c4d90bf38fe3672e01923734ccbdb7951ea929949d5f413a9c2aee12395a5094","src/ir/objc.rs":"05068c4fbf42429c4ac2a233c874f18ffcf7dc1744398e400a5a48d0e7a972f2","src/ir/template.rs":"bcd750450a4df0200a6e7958f9c96a09b91e3ccd29c60712f2b9d3458f1234aa","src/ir/traversal.rs":"da73d3fafa594fb12c13136f5c9d6e6ee0d6c7fa4b6e57863638d4ba5ef55dfa","src/ir/ty.rs":"7d16711a053
f1b4e9d11d3c5665062a58278a869b196a24b7b2a62227eb3a38f","src/ir/var.rs":"57c8aa9f834c6f06418f7d471b1771bbb915821ef0d194b383be60092edca5f7","src/lib.rs":"99ed57a26e794ce724e434af0bf4f72b199bd0e1c42833324beb939acda9af33","src/log_stubs.rs":"6dfdd908b7c6453da416cf232893768f9480e551ca4add0858ef88bf71ee6ceb","src/main.rs":"e519053bcdde6bc88f60f955246a02d53b3db1cc5ccd1612e6675b790b7460b0","src/options.rs":"3df0214f428c221604420f8eb511b1c1e6d9326d1c6d3d2dea48278f833f9b77","src/parse.rs":"be7d13cc84fae79ec7b3aa9e77063fa475a48d74a854423e2c72d75006a25202","src/regex_set.rs":"a55241f2117f15729d174790f386e255fcb224b692325bbe6716dbb1d6874881","src/time.rs":"a02befb48d10dcc31e3f9571b2fa1c40f97fafe6f6ae7d7fc4f8fd01f1a169ba"},"package":"603ed8d8392ace9581e834e26bd09799bf1e989a79bd1aedbb893e72962bdc6e"}
\ No newline at end of file
+{"files":{"Cargo.toml":"6edcfceba304030bee629d3f492c5b43e810f104f12e945a98e6e643a786a138","LICENSE":"1d2e4bdb9d94ab020e9550136cae9ec73fc699c3c96a9d98078c542e9b93d294","README.md":"630d1a1d123c131bad0fec23173e263ba8ecc064b5cd8446d4cab7ffd197db45","build.rs":"032a1c51963894a421b0535f9227796d88768ac5f665a81d2edced69dc6d106a","src/callbacks.rs":"9d41b7848cea37e8741fa7bc947ba58a83647824b1a0bbe7ff75012c412eab13","src/clang.rs":"5ce27d0772e66f9f2f94a55afd18c98d4a57aed4c1d57da53b025fbbbb81a287","src/codegen/bitfield_unit.rs":"d1a6b5ba96a60f9ef506baf14c7366cf056db6f282b29cab9b2635e8f817de60","src/codegen/bitfield_unit_tests.rs":"2073ac6a36e0bc9afaef5b1207966817c8fb7a1a9f6368c3b1b8f79822efbfba","src/codegen/error.rs":"2613af1d833377fd4a70719f4a09951d9d45dc9227827b9a2a938a1bcaaea2dd","src/codegen/helpers.rs":"8badd4b5ba38b83477c3ee3fc6f9ca79059b5650f5b489b2723c335037e27d92","src/codegen/impl_debug.rs":"e2ffd5b6ed936698aa4b9e7e3459d353383792707ad51f829a18a822f69cab0e","src/codegen/impl_partialeq.rs":"d69e2a9cdf2fdea74a60532109c0a1a75791f5a5ef931b28c5d447fa2915e5d3","src/codegen/mod.rs":"c923594d8d27dede9192dd1081acdedf97d67430f780e3dc4db39b8928a55d71","src/codegen/struct_layout.rs":"9bd0e3455e55e2a1faa4f332a327c2529a21bdfdd0fcb3a45bc5cdd7801d288f","src/extra_assertions.rs":"449549c4a7a50c3f0b06332452b2fb6c9b23f31ca8e5e1656fe6c7f21e8ef7fa","src/features.rs":"a437e6f736c8fba81ff74d90e7ddd142192ae5ffb9240d8c885eb84be6f2fe45","src/ir/analysis/derive_copy.rs":"59c21e299af3a36c6c82d4d16454700238269abd5929ec2d67c8414aebf82c3b","src/ir/analysis/derive_debug.rs":"3530e27ab0e260ec013cee3ad78a81497970c656a8eed589b755cce8caf53040","src/ir/analysis/derive_default.rs":"20e9dac151fadc59a7926ed9276ee8ced47e59c3f0c43f69fdafb75706045aca","src/ir/analysis/derive_hash.rs":"85c73c5660dc311ab6c15a21b69c4c5d2aa380d740decaf59ad594a6728cbe1f","src/ir/analysis/derive_partialeq_or_partialord.rs":"fb9540c324fdfcc9b0ae816e7713af000b11f5e2768c512c22a3082f263bb6bc","src/ir/analysis/has_destructor.rs":"d9aaa
ceba580b48eb0df4e5537b34b417c51ccdfeb8f6b72484f3bf4992317fe","src/ir/analysis/has_float.rs":"2a0465503d2c8247eaf916bd6a03594f3dc0370533d9a7c58cc5afb86693816c","src/ir/analysis/has_type_param_in_array.rs":"fcb1c78b6000f1f5eb8d8147e2afdaba9eb0e3a81b61e72537048dfdbeea7bcd","src/ir/analysis/has_vtable.rs":"37765e954ef792e369a58ccfe1d827a00fe9bce680466da1d6523671b94b6c92","src/ir/analysis/mod.rs":"ea5ace45c77e855674bb565ba0fef556f60e3293b0ddcf11d3a5a6ec15ab0648","src/ir/analysis/sizedness.rs":"3d3c8bde40604d53bb64273a3cbd8c55936a7dfe1de9b2ba92fc2c45572624b4","src/ir/analysis/template_params.rs":"6554dd1240142ec0e7299e678b696725f5cba99243d1c3d1cbf58d4764082fd6","src/ir/annotations.rs":"ef106afcbe6084c18bd13a37ee3c1cdc9596bfb055db8c773d81f8f15fec3208","src/ir/comment.rs":"000481754e5433d7c0886b9ce8b93b64c7ab1ae52867d211c73c7c4b336649a2","src/ir/comp.rs":"7b22f3ff19ca45a6fbfe7ea015109d43f4ddf65b33b47b1c37829fcb87cdff9b","src/ir/context.rs":"80679859b4efa52d74b0c7501bb5951b58c9705a97b96c9fc05134d1abe401c6","src/ir/derive.rs":"9550d01731ca66be28124c91fd0211a618743a065bec7b00e27c934afff82a84","src/ir/dot.rs":"d01f1621ab67e368d854a82bd6bb0b8dd52f3c2c733de8eaf81aece9543818cb","src/ir/enum_ty.rs":"3611100df8ddf01b010d2eae1d26a67df022e47b6236b0ed9d1b9b42340ebafd","src/ir/function.rs":"b86e665c6659c32bce39194240e7da6221c5a2ec51b362ad9f6e34f1bc396a6f","src/ir/int.rs":"1f61a472288afe489d9320bc8b13920333ece57891ae8570b4c4f25ab50688e6","src/ir/item.rs":"6f0d13615c6883b5e64c75f6d18d79b978b47aa3599ae1f4c196903d2d2cda68","src/ir/item_kind.rs":"dbeae8c4fd0e5c9485d325aea040e056a1f2cd6d43fc927dee8fe1c0c59a7197","src/ir/layout.rs":"17daab0a80564006de1f6b6190e7d9e6c5eb96990242fe707f8dc676f7110c18","src/ir/mod.rs":"2eae90f207fad2e45957ec9287064992a419e3fc916aba84faff2ea25cbeb5ee","src/ir/module.rs":"c4d90bf38fe3672e01923734ccbdb7951ea929949d5f413a9c2aee12395a5094","src/ir/objc.rs":"828a890acdc8b10c44e69e2ed4a4f5d8c0e734606d3a8cc71658dcf43a49acf4","src/ir/template.rs":"21ba4cbacafce39b4a8013bc97
e8054a906a8bc2f45a51aeef3b007caadde221","src/ir/traversal.rs":"eaca9aa6deb3eea9b8d7adc696781c70038796ff43e536bda47e90f84fe87d61","src/ir/ty.rs":"daa95f757288e4bfe84f9724c5f5df127b6f3a4452330691a24a94369db7d993","src/ir/var.rs":"57c8aa9f834c6f06418f7d471b1771bbb915821ef0d194b383be60092edca5f7","src/lib.rs":"d4217ac878659794c5990b1612f1de12f94f975257ea14307fb7dc63ac3a76da","src/log_stubs.rs":"6dfdd908b7c6453da416cf232893768f9480e551ca4add0858ef88bf71ee6ceb","src/main.rs":"e519053bcdde6bc88f60f955246a02d53b3db1cc5ccd1612e6675b790b7460b0","src/options.rs":"f1be872e418a96582b048c095ceeeba012b92ffed8a9658cd1fd9ae0e192372d","src/parse.rs":"be7d13cc84fae79ec7b3aa9e77063fa475a48d74a854423e2c72d75006a25202","src/regex_set.rs":"a55241f2117f15729d174790f386e255fcb224b692325bbe6716dbb1d6874881","src/time.rs":"3b763e6fee51d0eb01228dfe28bc28a9f692aff73b2a7b90a030902e0238fca6"},"package":"a847bea78e36c3d5d99ca99cac82a24d56e5f2105e402f3941a190bf92146579"}
\ No newline at end of file
--- a/third_party/rust/bindgen/Cargo.toml
+++ b/third_party/rust/bindgen/Cargo.toml
@@ -7,21 +7,22 @@
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "bindgen"
-version = "0.33.2"
+version = "0.37.3"
authors = ["Jyun-Yan You <jyyou.tw@gmail.com>", "Emilio Cobos Álvarez <emilio@crisal.io>", "Nick Fitzgerald <fitzgen@gmail.com>", "The Servo project developers"]
build = "build.rs"
-include = ["Cargo.toml", "build.rs", "src/*.rs", "src/**/*.rs"]
+include = ["LICENSE", "README.md", "Cargo.toml", "build.rs", "src/*.rs", "src/**/*.rs"]
description = "Automatically generates Rust FFI bindings to C and C++ libraries."
+homepage = "https://rust-lang-nursery.github.io/rust-bindgen/"
documentation = "https://docs.rs/bindgen"
readme = "README.md"
keywords = ["bindings", "ffi", "code-generation"]
categories = ["external-ffi-bindings", "development-tools::ffi"]
license = "BSD-3-Clause"
repository = "https://github.com/rust-lang-nursery/rust-bindgen"
[lib]
@@ -33,18 +34,18 @@ path = "src/main.rs"
doc = false
[dependencies.cexpr]
version = "0.2"
[dependencies.cfg-if]
version = "0.1.0"
[dependencies.clang-sys]
-version = "0.22.0"
-features = ["runtime", "clang_3_9"]
+version = "0.23"
+features = ["runtime", "clang_6_0"]
[dependencies.clap]
version = "2"
[dependencies.env_logger]
version = "0.5"
optional = true
@@ -53,21 +54,26 @@ version = "1"
[dependencies.log]
version = "0.4"
optional = true
[dependencies.peeking_take_while]
version = "0.1.2"
+[dependencies.proc-macro2]
+version = "0.3.2, < 0.3.6"
+default-features = false
+
[dependencies.quote]
-version = "0.3.15"
+version = "0.5"
+default-features = false
[dependencies.regex]
-version = "0.2"
+version = "1.0"
[dependencies.which]
version = "1.0.2"
[dev-dependencies.clap]
version = "2"
[dev-dependencies.diff]
version = "0.1"
@@ -79,10 +85,11 @@ version = "0.1"
default = ["logging"]
logging = ["env_logger", "log"]
static = []
testing_only_docs = []
testing_only_extra_assertions = []
testing_only_libclang_3_8 = []
testing_only_libclang_3_9 = []
testing_only_libclang_4 = []
+testing_only_libclang_5 = []
[badges.travis-ci]
repository = "rust-lang-nursery/rust-bindgen"
new file mode 100644
--- /dev/null
+++ b/third_party/rust/bindgen/LICENSE
@@ -0,0 +1,28 @@
+Copyright (c) 2013, Jyun-Yan You
+
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+3. Neither the name of the author nor the names of his contributors
+ may be used to endorse or promote products derived from this software
+ without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGE.
new file mode 100644
--- /dev/null
+++ b/third_party/rust/bindgen/README.md
@@ -0,0 +1,45 @@
+# `bindgen`
+
+[`impl period`](https://blog.rust-lang.org/2017/09/18/impl-future-for-rust.html) has been started! Join us at [Gitter.im](https://gitter.im/rust-impl-period/WG-dev-tools-bindgen).
+
+**`bindgen` automatically generates Rust FFI bindings to C (and some C++) libraries.**
+
+For example, given the C header `doggo.h`:
+
+```c
+typedef struct Doggo {
+ int many;
+ char wow;
+} Doggo;
+
+void eleven_out_of_ten_majestic_af(Doggo* pupper);
+```
+
+`bindgen` produces Rust FFI code allowing you to call into the `doggo` library's
+functions and use its types:
+
+```rust
+/* automatically generated by rust-bindgen */
+
+#[repr(C)]
+pub struct Doggo {
+ pub many: ::std::os::raw::c_int,
+ pub wow: ::std::os::raw::c_char,
+}
+
+extern "C" {
+ pub fn eleven_out_of_ten_majestic_af(pupper: *mut Doggo);
+}
+```
+
+## Users Guide
+
+[📚 Read the `bindgen` users guide here! 📚](https://rust-lang-nursery.github.io/rust-bindgen)
+
+## API Reference
+
+[API reference documentation is on docs.rs](https://docs.rs/bindgen)
+
+## Contributing
+
+[See `CONTRIBUTING.md` for hacking on `bindgen`!](./CONTRIBUTING.md)
--- a/third_party/rust/bindgen/src/callbacks.rs
+++ b/third_party/rust/bindgen/src/callbacks.rs
@@ -30,17 +30,17 @@ pub trait ParseCallbacks: fmt::Debug + U
}
/// The integer kind an integer macro should have, given a name and the
/// value of that macro, or `None` if you want the default to be chosen.
fn int_macro(&self, _name: &str, _value: i64) -> Option<IntKind> {
None
}
- /// This function should return whether, given the a given enum variant
+ /// This function should return whether, given an enum variant
/// name, and value, this enum variant will forcibly be a constant.
fn enum_variant_behavior(
&self,
_enum_name: Option<&str>,
_original_variant_name: &str,
_variant_value: EnumVariantValue,
) -> Option<EnumVariantCustomBehavior> {
None
--- a/third_party/rust/bindgen/src/clang.rs
+++ b/third_party/rust/bindgen/src/clang.rs
@@ -906,16 +906,23 @@ impl Type {
let size = self.fallible_size()?;
let align = self.fallible_align()?;
Ok(Layout::new(size, align))
}
/// Get the number of template arguments this type has, or `None` if it is
/// not some kind of template.
pub fn num_template_args(&self) -> Option<u32> {
+ // If an old libclang is loaded, we have no hope of answering this
+ // question correctly. However, that's no reason to panic when
+ // generating bindings for simple C headers with an old libclang.
+ if !clang_Type_getNumTemplateArguments::is_loaded() {
+ return None
+ }
+
let n = unsafe { clang_Type_getNumTemplateArguments(self.x) };
if n >= 0 {
Some(n as u32)
} else {
debug_assert_eq!(n, -1);
None
}
}
@@ -970,17 +977,17 @@ impl Type {
let num_elements_returned = unsafe { clang_getNumElements(self.x) };
if num_elements_returned != -1 {
Some(num_elements_returned as usize)
} else {
None
}
}
- /// Get the canonical version of this type. This sees through `typdef`s and
+ /// Get the canonical version of this type. This sees through `typedef`s and
/// aliases to get the underlying, canonical type.
pub fn canonical_type(&self) -> Type {
unsafe {
Type {
x: clang_getCanonicalType(self.x),
}
}
}
@@ -1634,18 +1641,21 @@ pub fn ast_dump(c: &Cursor, depth: isize
}
print_indent(depth, format!(" {}cconv = {}", prefix, ty.call_conv()));
print_indent(
depth,
format!(" {}spelling = \"{}\"", prefix, ty.spelling()),
);
- let num_template_args =
- unsafe { clang_Type_getNumTemplateArguments(ty.x) };
+ let num_template_args = if clang_Type_getNumTemplateArguments::is_loaded() {
+ unsafe { clang_Type_getNumTemplateArguments(ty.x) }
+ } else {
+ -1
+ };
if num_template_args >= 0 {
print_indent(
depth,
format!(
" {}number-of-template-args = {}",
prefix,
num_template_args
),
@@ -1807,8 +1817,40 @@ impl EvalResult {
}
}
impl Drop for EvalResult {
fn drop(&mut self) {
unsafe { clang_EvalResult_dispose(self.x) };
}
}
+
+/// Target information obtained from libclang.
+#[derive(Debug)]
+pub struct TargetInfo {
+ /// The target triple.
+ pub triple: String,
+ /// The width of the pointer _in bits_.
+ pub pointer_width: usize,
+}
+
+impl TargetInfo {
+ /// Tries to obtain target information from libclang.
+ pub fn new(tu: &TranslationUnit) -> Option<Self> {
+ if !clang_getTranslationUnitTargetInfo::is_loaded() {
+ return None;
+ }
+ let triple;
+ let pointer_width;
+ unsafe {
+ let ti = clang_getTranslationUnitTargetInfo(tu.x);
+ triple = cxstring_into_string(clang_TargetInfo_getTriple(ti));
+ pointer_width = clang_TargetInfo_getPointerWidth(ti);
+ clang_TargetInfo_dispose(ti);
+ }
+ assert!(pointer_width > 0);
+ assert_eq!(pointer_width % 8, 0);
+ Some(TargetInfo {
+ triple,
+ pointer_width: pointer_width as usize,
+ })
+ }
+}
--- a/third_party/rust/bindgen/src/codegen/bitfield_unit.rs
+++ b/third_party/rust/bindgen/src/codegen/bitfield_unit.rs
@@ -22,30 +22,40 @@ where
#[inline]
pub fn get_bit(&self, index: usize) -> bool {
debug_assert!(index / 8 < self.storage.as_ref().len());
let byte_index = index / 8;
let byte = self.storage.as_ref()[byte_index];
- let bit_index = index % 8;
+ let mut bit_index = index % 8;
+ #[cfg(target_endian = "big")]
+ {
+ // Adjust the index for endianness.
+ bit_index = 7 - bit_index;
+ }
let mask = 1 << bit_index;
byte & mask == mask
}
#[inline]
pub fn set_bit(&mut self, index: usize, val: bool) {
debug_assert!(index / 8 < self.storage.as_ref().len());
let byte_index = index / 8;
let byte = &mut self.storage.as_mut()[byte_index];
- let bit_index = index % 8;
+ let mut bit_index = index % 8;
+ #[cfg(target_endian = "big")]
+ {
+ // Adjust the index for endianness.
+ bit_index = 7 - bit_index;
+ }
let mask = 1 << bit_index;
if val {
*byte |= mask;
} else {
*byte &= !mask;
}
}
@@ -55,28 +65,40 @@ where
debug_assert!(bit_width <= 64);
debug_assert!(bit_offset / 8 < self.storage.as_ref().len());
debug_assert!((bit_offset + (bit_width as usize)) / 8 <= self.storage.as_ref().len());
let mut val = 0;
for i in 0..(bit_width as usize) {
if self.get_bit(i + bit_offset) {
- val |= 1 << i;
+ let mut index = i;
+ #[cfg(target_endian = "big")]
+ {
+ // Adjust the index for endianness.
+ index = bit_width as usize - 1 - index;
+ }
+ val |= 1 << index;
}
}
val
}
#[inline]
pub fn set(&mut self, bit_offset: usize, bit_width: u8, val: u64) {
debug_assert!(bit_width <= 64);
debug_assert!(bit_offset / 8 < self.storage.as_ref().len());
debug_assert!((bit_offset + (bit_width as usize)) / 8 <= self.storage.as_ref().len());
for i in 0..(bit_width as usize) {
let mask = 1 << i;
let val_bit_is_set = val & mask == mask;
- self.set_bit(i + bit_offset, val_bit_is_set);
+ let mut index = i;
+ #[cfg(target_endian = "big")]
+ {
+ // Adjust the index for endianness.
+ index = bit_width as usize - 1 - index;
+ }
+ self.set_bit(index + bit_offset, val_bit_is_set);
}
}
}
--- a/third_party/rust/bindgen/src/codegen/helpers.rs
+++ b/third_party/rust/bindgen/src/codegen/helpers.rs
@@ -1,53 +1,55 @@
//! Helpers for code generation that don't need macro expansion.
use ir::context::BindgenContext;
use ir::layout::Layout;
use quote;
use std::mem;
+use proc_macro2::{Term, Span};
pub mod attributes {
use quote;
+ use proc_macro2::{Term, Span};
pub fn repr(which: &str) -> quote::Tokens {
- let which = quote::Ident::new(which);
+ let which = Term::new(which, Span::call_site());
quote! {
#[repr( #which )]
}
}
pub fn repr_list(which_ones: &[&str]) -> quote::Tokens {
- let which_ones = which_ones.iter().cloned().map(quote::Ident::new);
+ let which_ones = which_ones.iter().cloned().map(|one| Term::new(one, Span::call_site()));
quote! {
#[repr( #( #which_ones ),* )]
}
}
pub fn derives(which_ones: &[&str]) -> quote::Tokens {
- let which_ones = which_ones.iter().cloned().map(quote::Ident::new);
+ let which_ones = which_ones.iter().cloned().map(|one| Term::new(one, Span::call_site()));
quote! {
#[derive( #( #which_ones ),* )]
}
}
pub fn inline() -> quote::Tokens {
quote! {
#[inline]
}
}
pub fn doc(comment: String) -> quote::Tokens {
// Doc comments are already preprocessed into nice `///` formats by the
// time they get here. Just make sure that we have newlines around it so
// that nothing else gets wrapped into the comment.
let mut tokens = quote! {};
- tokens.append("\n");
- tokens.append(comment);
- tokens.append("\n");
+ tokens.append(Term::new("\n", Span::call_site()));
+ tokens.append(Term::new(&comment, Span::call_site()));
+ tokens.append(Term::new("\n", Span::call_site()));
tokens
}
pub fn link_name(name: &str) -> quote::Tokens {
// LLVM mangles the name by default but it's already mangled.
// Prefixing the name with \u{1} should tell LLVM to not mangle it.
let name = format!("\u{1}{}", name);
quote! {
@@ -68,17 +70,17 @@ pub fn blob(layout: Layout) -> quote::To
let ty_name = match opaque.known_rust_type_for_array() {
Some(ty) => ty,
None => {
warn!("Found unknown alignment on code generation!");
"u8"
}
};
- let ty_name = quote::Ident::new(ty_name);
+ let ty_name = Term::new(ty_name, Span::call_site());
let data_len = opaque.array_size().unwrap_or(layout.size);
if data_len == 1 {
quote! {
#ty_name
}
} else {
@@ -98,39 +100,40 @@ pub fn integer_type(layout: Layout) -> O
}
}
/// Generates a bitfield allocation unit type for a type with the given `Layout`.
pub fn bitfield_unit(ctx: &BindgenContext, layout: Layout) -> quote::Tokens {
let mut tokens = quote! {};
if ctx.options().enable_cxx_namespaces {
- tokens.append(quote! { root:: });
+ tokens.append_all(quote! { root:: });
}
let align = match layout.align {
n if n >= 8 => quote! { u64 },
4 => quote! { u32 },
2 => quote! { u16 },
_ => quote! { u8 },
};
let size = layout.size;
- tokens.append(quote! {
+ tokens.append_all(quote! {
__BindgenBitfieldUnit<[u8; #size], #align>
});
tokens
}
pub mod ast_ty {
use ir::context::BindgenContext;
use ir::function::FunctionSig;
use ir::ty::FloatKind;
use quote;
+ use proc_macro2;
pub fn raw_type(ctx: &BindgenContext, name: &str) -> quote::Tokens {
let ident = ctx.rust_ident_raw(name);
match ctx.options().ctypes_prefix {
Some(ref prefix) => {
let prefix = ctx.rust_ident_raw(prefix.as_str());
quote! {
#prefix::#ident
@@ -161,59 +164,48 @@ pub mod ast_ty {
(FloatKind::Double, false) |
(FloatKind::LongDouble, false) => raw_type(ctx, "c_double"),
(FloatKind::Float128, _) => quote! { [u8; 16] },
}
}
pub fn int_expr(val: i64) -> quote::Tokens {
// Don't use quote! { #val } because that adds the type suffix.
- let mut tokens = quote! {};
- tokens.append(val.to_string());
- tokens
+ let val = proc_macro2::Literal::i64_unsuffixed(val);
+ quote!(#val)
}
pub fn uint_expr(val: u64) -> quote::Tokens {
// Don't use quote! { #val } because that adds the type suffix.
- let mut tokens = quote! {};
- tokens.append(val.to_string());
- tokens
+ let val = proc_macro2::Literal::u64_unsuffixed(val);
+ quote!(#val)
}
pub fn byte_array_expr(bytes: &[u8]) -> quote::Tokens {
let mut bytes: Vec<_> = bytes.iter().cloned().collect();
bytes.push(0);
- quote! {
- #bytes
- }
+ quote! { [ #(#bytes),* ] }
}
pub fn cstr_expr(mut string: String) -> quote::Tokens {
string.push('\0');
- let b = quote::ByteStr(&string);
+ let b = proc_macro2::Literal::byte_string(&string.as_bytes());
quote! {
#b
}
}
pub fn float_expr(
ctx: &BindgenContext,
f: f64,
) -> Result<quote::Tokens, ()> {
if f.is_finite() {
- let mut string = f.to_string();
+ let val = proc_macro2::Literal::f64_unsuffixed(f);
- // So it gets properly recognised as a floating point constant.
- if !string.contains('.') {
- string.push('.');
- }
-
- let mut tokens = quote! {};
- tokens.append(string);
- return Ok(tokens);
+ return Ok(quote!(#val));
}
let prefix = ctx.trait_prefix();
if f.is_nan() {
return Ok(quote! {
::#prefix::f64::NAN
});
--- a/third_party/rust/bindgen/src/codegen/impl_partialeq.rs
+++ b/third_party/rust/bindgen/src/codegen/impl_partialeq.rs
@@ -1,31 +1,32 @@
use ir::comp::{CompInfo, CompKind, Field, FieldMethods};
use ir::context::BindgenContext;
use ir::item::{IsOpaque, Item};
use ir::ty::{TypeKind, RUST_DERIVE_IN_ARRAY_LIMIT};
use quote;
+use proc_macro2;
/// Generate a manual implementation of `PartialEq` trait for the
/// specified compound type.
pub fn gen_partialeq_impl(
ctx: &BindgenContext,
comp_info: &CompInfo,
item: &Item,
ty_for_impl: "e::Tokens,
) -> Option<quote::Tokens> {
let mut tokens = vec![];
if item.is_opaque(ctx, &()) {
tokens.push(quote! {
&self._bindgen_opaque_blob[..] == &other._bindgen_opaque_blob[..]
});
} else if comp_info.kind() == CompKind::Union {
- assert!(!ctx.options().rust_features().untagged_union());
+ assert!(!ctx.options().rust_features().untagged_union);
tokens.push(quote! {
&self.bindgen_union_field[..] == &other.bindgen_union_field[..]
});
} else {
for base in comp_info.base_members().iter() {
if !base.requires_storage(ctx) {
continue;
}
@@ -66,17 +67,17 @@ pub fn gen_partialeq_impl(
Some(quote! {
fn eq(&self, other: & #ty_for_impl) -> bool {
#( #tokens )&&*
}
})
}
fn gen_field(ctx: &BindgenContext, ty_item: &Item, name: &str) -> quote::Tokens {
- fn quote_equals(name_ident: quote::Ident) -> quote::Tokens {
+ fn quote_equals(name_ident: proc_macro2::Term) -> quote::Tokens {
quote! { self.#name_ident == other.#name_ident }
}
let name_ident = ctx.rust_ident(name);
let ty = ty_item.expect_type();
match *ty.kind() {
TypeKind::Void |
--- a/third_party/rust/bindgen/src/codegen/mod.rs
+++ b/third_party/rust/bindgen/src/codegen/mod.rs
@@ -33,24 +33,25 @@ use ir::item_kind::ItemKind;
use ir::layout::Layout;
use ir::module::Module;
use ir::objc::{ObjCInterface, ObjCMethod};
use ir::template::{AsTemplateParam, TemplateInstantiation, TemplateParameters};
use ir::ty::{Type, TypeKind};
use ir::var::Var;
use quote;
-
+use proc_macro2::{self, Term, Span};
+
+use std;
use std::borrow::Cow;
use std::cell::Cell;
use std::collections::{HashSet, VecDeque};
use std::collections::hash_map::{Entry, HashMap};
use std::fmt::Write;
use std::iter;
-use std::mem;
use std::ops;
// Name of type defined in constified enum module
pub static CONSTIFIED_ENUM_MODULE_REPR_NAME: &'static str = "Type";
fn top_level_path(ctx: &BindgenContext, item: &Item) -> Vec<quote::Tokens> {
let mut path = vec![quote! { self }];
@@ -70,17 +71,17 @@ fn root_import(ctx: &BindgenContext, mod
let mut path = top_level_path(ctx, module);
let root = ctx.root_module().canonical_name(ctx);
let root_ident = ctx.rust_ident(&root);
path.push(quote! { #root_ident });
let mut tokens = quote! {};
- tokens.append_separated(path, "::");
+ tokens.append_separated(path, Term::new("::", Span::call_site()));
quote! {
#[allow(unused_imports)]
use #tokens ;
}
}
struct CodegenResult<'a> {
@@ -294,27 +295,22 @@ impl AppendImplicitTemplateParams for qu
TypeKind::Enum(..) |
TypeKind::BlockPointer |
TypeKind::ObjCId |
TypeKind::ObjCSel |
TypeKind::TemplateInstantiation(..) => return,
_ => {},
}
- if let Some(params) = item.used_template_params(ctx) {
- if params.is_empty() {
- return;
- }
-
- let params = params.into_iter().map(|p| {
- p.try_to_rust_ty(ctx, &())
- .expect("template params cannot fail to be a rust type")
- });
-
- self.append(quote! {
+ let params: Vec<_> = item.used_template_params(ctx).iter().map(|p| {
+ p.try_to_rust_ty(ctx, &())
+ .expect("template params cannot fail to be a rust type")
+ }).collect();
+ if !params.is_empty() {
+ self.append_all(quote! {
< #( #params ),* >
});
}
}
}
trait CodeGenerator {
/// Extra information from the caller.
@@ -399,17 +395,17 @@ impl CodeGenerator for Module {
if item.id() == ctx.root_module() {
if result.saw_bindgen_union {
utils::prepend_union_types(ctx, &mut *result);
}
if result.saw_incomplete_array {
utils::prepend_incomplete_array_types(ctx, &mut *result);
}
- if ctx.need_bindegen_complex_type() {
+ if ctx.need_bindgen_complex_type() {
utils::prepend_complex_type(&mut *result);
}
if result.saw_objc {
utils::prepend_objc_header(ctx, &mut *result);
}
if result.saw_bitfield_unit {
utils::prepend_bitfield_unit_type(&mut *result);
}
@@ -422,35 +418,46 @@ impl CodeGenerator for Module {
{
codegen_self(result, &mut false);
return;
}
let mut found_any = false;
let inner_items = result.inner(|result| {
result.push(root_import(ctx, item));
+
+ let path = item.namespace_aware_canonical_path(ctx).join("::");
+ if let Some(raw_lines) = ctx.options().module_lines.get(&path) {
+ for raw_line in raw_lines {
+ found_any = true;
+ // FIXME(emilio): The use of `Term` is an abuse, but we abuse it
+ // in a bunch more places.
+ let line = Term::new(raw_line, Span::call_site());
+ result.push(quote! { #line });
+ }
+ }
+
codegen_self(result, &mut found_any);
});
// Don't bother creating an empty module.
if !found_any {
return;
}
let name = item.canonical_name(ctx);
-
- result.push(if name == "root" {
+ let ident = ctx.rust_ident(name);
+ result.push(if item.id() == ctx.root_module() {
quote! {
#[allow(non_snake_case, non_camel_case_types, non_upper_case_globals)]
- pub mod root {
+ pub mod #ident {
#( #inner_items )*
}
}
} else {
- let ident = ctx.rust_ident(name);
quote! {
pub mod #ident {
#( #inner_items )*
}
}
});
}
}
@@ -474,21 +481,18 @@ impl CodeGenerator for Var {
}
result.saw_var(&canonical_name);
let canonical_ident = ctx.rust_ident(&canonical_name);
// We can't generate bindings to static variables of templates. The
// number of actual variables for a single declaration are open ended
// and we don't know what instantiations do or don't exist.
- let type_params = item.all_template_params(ctx);
- if let Some(params) = type_params {
- if !params.is_empty() {
- return;
- }
+ if !item.all_template_params(ctx).is_empty() {
+ return;
}
let ty = self.ty().to_rust_ty_or_opaque(ctx, &());
if let Some(val) = self.val() {
match *val {
VarType::Bool(val) => {
result.push(quote! {
@@ -631,25 +635,20 @@ impl CodeGenerator for Type {
// If this is a known named type, disallow generating anything
// for it too.
let spelling = self.name().expect("Unnamed alias?");
if utils::type_from_named(ctx, spelling).is_some() {
return;
}
- let mut outer_params = item.used_template_params(ctx)
- .and_then(|ps| if ps.is_empty() {
- None
- } else {
- Some(ps)
- });
+ let mut outer_params = item.used_template_params(ctx);
let inner_rust_type = if item.is_opaque(ctx, &()) {
- outer_params = None;
+ outer_params = vec![];
self.to_opaque(ctx, item)
} else {
// Its possible that we have better layout information than
// the inner type does, so fall back to an opaque blob based
// on our layout if converting the inner item fails.
let mut inner_ty = inner_item
.try_to_rust_ty_or_opaque(ctx, &())
.unwrap_or_else(|_| self.to_opaque(ctx, item));
@@ -686,68 +685,66 @@ impl CodeGenerator for Type {
let mut tokens = if let Some(comment) = item.comment(ctx) {
attributes::doc(comment)
} else {
quote! {}
};
// We prefer using `pub use` over `pub type` because of:
// https://github.com/rust-lang/rust/issues/26264
- if inner_rust_type.as_str()
+ if inner_rust_type.to_string()
.chars()
.all(|c| match c {
// These are the only characters allowed in simple
// paths, eg `good::dogs::Bront`.
'A'...'Z' | 'a'...'z' | '0'...'9' | ':' | '_' | ' ' => true,
_ => false,
}) &&
- outer_params.is_none() &&
+ outer_params.is_empty() &&
inner_item.expect_type().canonical_type(ctx).is_enum()
{
- tokens.append(quote! {
+ tokens.append_all(quote! {
pub use
});
let path = top_level_path(ctx, item);
- tokens.append_separated(path, "::");
- tokens.append(quote! {
+ tokens.append_separated(path, Term::new("::", Span::call_site()));
+ tokens.append_all(quote! {
:: #inner_rust_type as #rust_name ;
});
result.push(tokens);
return;
}
- tokens.append(quote! {
+ tokens.append_all(quote! {
pub type #rust_name
});
- if let Some(params) = outer_params {
- let params: Vec<_> = params.into_iter()
- .filter_map(|p| p.as_template_param(ctx, &()))
- .collect();
- if params.iter().any(|p| ctx.resolve_type(*p).is_invalid_type_param()) {
- warn!(
- "Item contained invalid template \
- parameter: {:?}",
- item
- );
- return;
- }
-
- let params = params.iter()
- .map(|p| {
- p.try_to_rust_ty(ctx, &())
- .expect("type parameters can always convert to rust ty OK")
- });
-
- tokens.append(quote! {
+ let params: Vec<_> = outer_params.into_iter()
+ .filter_map(|p| p.as_template_param(ctx, &()))
+ .collect();
+ if params.iter().any(|p| ctx.resolve_type(*p).is_invalid_type_param()) {
+ warn!(
+ "Item contained invalid template \
+ parameter: {:?}",
+ item
+ );
+ return;
+ }
+ let params: Vec<_> = params.iter().map(|p| {
+ p.try_to_rust_ty(ctx, &())
+ .expect("type parameters can always convert to rust ty OK")
+ }).collect();
+
+ if !params.is_empty() {
+ tokens.append_all(quote! {
< #( #params ),* >
});
}
- tokens.append(quote! {
+ tokens.append_all(quote! {
= #inner_rust_type ;
});
result.push(tokens);
}
TypeKind::Enum(ref ei) => ei.codegen(ctx, result, item),
TypeKind::ObjCId | TypeKind::ObjCSel => {
result.saw_objc();
@@ -1054,21 +1051,21 @@ impl<'a> FieldCodegen<'a> for FieldData
let is_private = self.annotations().private_fields().unwrap_or(
fields_should_be_private,
);
let accessor_kind =
self.annotations().accessor_kind().unwrap_or(accessor_kind);
if is_private {
- field.append(quote! {
+ field.append_all(quote! {
#field_ident : #ty ,
});
} else {
- field.append(quote! {
+ field.append_all(quote! {
pub #field_ident : #ty ,
});
}
fields.extend(Some(field));
// TODO: Factor the following code out, please!
if accessor_kind == FieldAccessorKind::None {
@@ -1118,17 +1115,17 @@ impl<'a> FieldCodegen<'a> for FieldData
}
}));
}
}
impl BitfieldUnit {
/// Get the constructor name for this bitfield unit.
fn ctor_name(&self) -> quote::Tokens {
- let ctor_name = quote::Ident::new(format!("new_bitfield_{}", self.nth()));
+ let ctor_name = Term::new(&format!("new_bitfield_{}", self.nth()), Span::call_site());
quote! {
#ctor_name
}
}
}
impl Bitfield {
/// Extend an under construction bitfield unit constructor with this
@@ -1149,17 +1146,17 @@ impl Bitfield {
"Bitfield without layout? Gah!",
);
let bitfield_int_ty = helpers::blob(bitfield_ty_layout);
let offset = self.offset_into_unit();
let width = self.width() as u8;
let prefix = ctx.trait_prefix();
- ctor_impl.append(quote! {
+ ctor_impl.append_all(quote! {
__bindgen_bitfield_unit.set(
#offset,
#width,
{
let #param_name: #bitfield_int_ty = unsafe {
::#prefix::mem::transmute(#param_name)
};
#param_name as u64
@@ -1317,17 +1314,17 @@ impl<'a> FieldCodegen<'a> for Bitfield {
(unit_field_name, bitfield_representable_as_int): (&'a str, &mut bool),
) where
F: Extend<quote::Tokens>,
M: Extend<quote::Tokens>,
{
let prefix = ctx.trait_prefix();
let getter_name = bitfield_getter_name(ctx, self);
let setter_name = bitfield_setter_name(ctx, self);
- let unit_field_ident = quote::Ident::new(unit_field_name);
+ let unit_field_ident = Term::new(unit_field_name, Span::call_site());
let bitfield_ty_item = ctx.resolve_item(self.ty());
let bitfield_ty = bitfield_ty_item.expect_type();
let bitfield_ty_layout = bitfield_ty.layout(ctx).expect(
"Bitfield without layout? Gah!",
);
let bitfield_int_ty = match helpers::integer_type(bitfield_ty_layout) {
@@ -1413,18 +1410,16 @@ impl CodeGenerator for CompInfo {
debug_assert!(item.is_enabled_for_codegen(ctx));
// Don't output classes with template parameters that aren't types, and
// also don't output template specializations, neither total or partial.
if self.has_non_type_template_params() {
return;
}
- let used_template_params = item.used_template_params(ctx);
-
let ty = item.expect_type();
let layout = ty.layout(ctx);
let mut packed = self.is_packed(ctx, &layout);
let canonical_name = item.canonical_name(ctx);
let canonical_ident = ctx.rust_ident(&canonical_name);
// Generate the vtable from the method list if appropriate.
@@ -1522,23 +1517,26 @@ impl CodeGenerator for CompInfo {
struct_layout.saw_union(layout);
quote! {
pub bindgen_union_field: #ty ,
}
});
}
+ let mut explicit_align = None;
if is_opaque {
// Opaque item should not have generated methods, fields.
debug_assert!(fields.is_empty());
debug_assert!(methods.is_empty());
match layout {
Some(l) => {
+ explicit_align = Some(l.align);
+
let ty = helpers::blob(l);
fields.push(quote! {
pub _bindgen_opaque_blob: #ty ,
});
}
None => {
warn!("Opaque type without layout! Expect dragons!");
}
@@ -1550,16 +1548,17 @@ impl CodeGenerator for CompInfo {
fields.push(padding_field);
}
if let Some(layout) = layout {
if struct_layout.requires_explicit_align(layout) {
if layout.align == 1 {
packed = true;
} else {
+ explicit_align = Some(layout.align);
let ty = helpers::blob(Layout::new(0, layout.align));
fields.push(quote! {
pub __bindgen_align: #ty ,
});
}
}
}
}
@@ -1592,31 +1591,29 @@ impl CodeGenerator for CompInfo {
fields.push(quote! {
pub _address: #ty,
});
}
}
let mut generic_param_names = vec![];
- if let Some(ref params) = used_template_params {
- for (idx, ty) in params.iter().enumerate() {
- let param = ctx.resolve_type(*ty);
- let name = param.name().unwrap();
- let ident = ctx.rust_ident(name);
- generic_param_names.push(ident.clone());
-
- let prefix = ctx.trait_prefix();
- let field_name = ctx.rust_ident(format!("_phantom_{}", idx));
- fields.push(quote! {
- pub #field_name : ::#prefix::marker::PhantomData<
- ::#prefix::cell::UnsafeCell<#ident>
- > ,
- });
- }
+ for (idx, ty) in item.used_template_params(ctx).iter().enumerate() {
+ let param = ctx.resolve_type(*ty);
+ let name = param.name().unwrap();
+ let ident = ctx.rust_ident(name);
+ generic_param_names.push(ident.clone());
+
+ let prefix = ctx.trait_prefix();
+ let field_name = ctx.rust_ident(format!("_phantom_{}", idx));
+ fields.push(quote! {
+ pub #field_name : ::#prefix::marker::PhantomData<
+ ::#prefix::cell::UnsafeCell<#ident>
+ > ,
+ });
}
let generics = if !generic_param_names.is_empty() {
let generic_param_names = generic_param_names.clone();
quote! {
< #( #generic_param_names ),* >
}
} else {
@@ -1632,36 +1629,50 @@ impl CodeGenerator for CompInfo {
attributes.push(attributes::doc(comment));
}
if packed && !is_opaque {
attributes.push(attributes::repr_list(&["C", "packed"]));
} else {
attributes.push(attributes::repr("C"));
}
+ if ctx.options().rust_features().repr_align {
+ if let Some(explicit) = explicit_align {
+ // Ensure that the struct has the correct alignment even in
+ // presence of alignas.
+ let explicit = helpers::ast_ty::int_expr(explicit as i64);
+ attributes.push(quote! {
+ #[repr(align(#explicit))]
+ });
+ }
+ }
+
+
let mut derives = vec![];
if item.can_derive_debug(ctx) {
derives.push("Debug");
} else {
needs_debug_impl = ctx.options().derive_debug &&
ctx.options().impl_debug
}
if item.can_derive_default(ctx) {
derives.push("Default");
} else {
needs_default_impl =
ctx.options().derive_default && !self.is_forward_declaration();
}
+ let all_template_params = item.all_template_params(ctx);
+
if item.can_derive_copy(ctx) && !item.annotations().disallow_copy() {
derives.push("Copy");
- if ctx.options().rust_features().builtin_clone_impls() ||
- used_template_params.is_some()
+ if ctx.options().rust_features().builtin_clone_impls ||
+ !all_template_params.is_empty()
{
// FIXME: This requires extra logic if you have a big array in a
// templated struct. The reason for this is that the magic:
// fn clone(&self) -> Self { *self }
// doesn't work for templates.
//
// It's not hard to fix though.
derives.push("Clone");
@@ -1706,17 +1717,17 @@ impl CodeGenerator for CompInfo {
}
} else {
quote! {
#( #attributes )*
pub struct #canonical_ident
}
};
- tokens.append(quote! {
+ tokens.append_all(quote! {
#generics {
#( #fields )*
}
});
result.push(tokens);
// Generate the inner types and all that stuff.
//
@@ -1729,45 +1740,46 @@ impl CodeGenerator for CompInfo {
}
// NOTE: Some unexposed attributes (like alignment attributes) may
// affect layout, so we're bad and pray to the gods for avoid sending
// all the tests to shit when parsing things like max_align_t.
if self.found_unknown_attr() {
warn!(
"Type {} has an unkown attribute that may affect layout",
- canonical_ident
+ canonical_ident.as_str()
);
}
- if used_template_params.is_none() {
+ if all_template_params.is_empty() {
if !is_opaque {
for var in self.inner_vars() {
ctx.resolve_item(*var).codegen(ctx, result, &());
}
}
if ctx.options().layout_tests && !self.is_forward_declaration() {
if let Some(layout) = layout {
let fn_name =
- format!("bindgen_test_layout_{}", canonical_ident);
+ format!("bindgen_test_layout_{}", canonical_ident.as_str());
let fn_name = ctx.rust_ident_raw(fn_name);
let prefix = ctx.trait_prefix();
let size_of_expr = quote! {
::#prefix::mem::size_of::<#canonical_ident>()
};
let align_of_expr = quote! {
::#prefix::mem::align_of::<#canonical_ident>()
};
let size = layout.size;
let align = layout.align;
let check_struct_align =
- if align > mem::size_of::<*mut ()>() {
- // FIXME when [RFC 1358](https://github.com/rust-lang/rust/issues/33626) ready
+ if align > ctx.target_pointer_size() &&
+ !ctx.options().rust_features().repr_align
+ {
None
} else {
Some(quote! {
assert_eq!(#align_of_expr,
#align,
concat!("Alignment of ", stringify!(#canonical_ident)));
})
@@ -1991,17 +2003,17 @@ impl MethodCodegen for Method {
_ => function.name().to_owned(),
};
let signature = match *signature_item.expect_type().kind() {
TypeKind::Function(ref sig) => sig,
_ => panic!("How in the world?"),
};
- if let (Abi::ThisCall, false) = (signature.abi(), ctx.options().rust_features().thiscall_abi()) {
+ if let (Abi::ThisCall, false) = (signature.abi(), ctx.options().rust_features().thiscall_abi) {
return;
}
// Do not generate variadic methods, since rust does not allow
// implementing them, and we don't do a good job at it anyway.
if signature.is_variadic() {
return;
}
@@ -2087,55 +2099,84 @@ impl MethodCodegen for Method {
pub unsafe fn #name ( #( #args ),* ) #ret {
#block
}
});
}
}
/// A helper type that represents different enum variations.
-#[derive(Copy, Clone)]
-enum EnumVariation {
+#[derive(Copy, Clone, PartialEq, Debug)]
+pub enum EnumVariation {
+ /// The code for this enum will use a Rust enum
Rust,
+ /// The code for this enum will use a bitfield
Bitfield,
+ /// The code for this enum will use consts
Consts,
+ /// The code for this enum will use a module containing consts
ModuleConsts
}
impl EnumVariation {
fn is_rust(&self) -> bool {
match *self {
EnumVariation::Rust => true,
_ => false
}
}
fn is_bitfield(&self) -> bool {
match *self {
- EnumVariation::Bitfield => true,
+ EnumVariation::Bitfield {..} => true,
_ => false
}
}
/// Both the `Const` and `ModuleConsts` variants will cause this to return
/// true.
fn is_const(&self) -> bool {
match *self {
EnumVariation::Consts | EnumVariation::ModuleConsts => true,
_ => false
}
}
}
+impl Default for EnumVariation {
+ fn default() -> EnumVariation {
+ EnumVariation::Consts
+ }
+}
+
+impl std::str::FromStr for EnumVariation {
+ type Err = std::io::Error;
+
+ /// Create a `EnumVariation` from a string.
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ match s {
+ "rust" => Ok(EnumVariation::Rust),
+ "bitfield" => Ok(EnumVariation::Bitfield),
+ "consts" => Ok(EnumVariation::Consts),
+ "moduleconsts" => Ok(EnumVariation::ModuleConsts),
+ _ => Err(std::io::Error::new(std::io::ErrorKind::InvalidInput,
+ concat!("Got an invalid EnumVariation. Accepted values ",
+ "are 'rust', 'bitfield', 'consts', and ",
+ "'moduleconsts'."))),
+ }
+ }
+}
+
+
/// A helper type to construct different enum variations.
enum EnumBuilder<'a> {
Rust {
codegen_depth: usize,
attrs: Vec<quote::Tokens>,
- ident: quote::Ident,
+ ident: Term,
tokens: quote::Tokens,
emitted_any_variants: bool,
},
Bitfield {
codegen_depth: usize,
canonical_name: &'a str,
tokens: quote::Tokens,
},
@@ -2165,32 +2206,32 @@ impl<'a> EnumBuilder<'a> {
/// the representation, and which variation it should be generated as.
fn new(
name: &'a str,
attrs: Vec<quote::Tokens>,
repr: quote::Tokens,
enum_variation: EnumVariation,
enum_codegen_depth: usize,
) -> Self {
- let ident = quote::Ident::new(name);
+ let ident = Term::new(name, Span::call_site());
match enum_variation {
EnumVariation::Bitfield => {
EnumBuilder::Bitfield {
codegen_depth: enum_codegen_depth,
canonical_name: name,
tokens: quote! {
#( #attrs )*
pub struct #ident (pub #repr);
},
}
}
EnumVariation::Rust => {
- let tokens = quote!{};
+ let tokens = quote!();
EnumBuilder::Rust {
codegen_depth: enum_codegen_depth + 1,
attrs,
ident,
tokens,
emitted_any_variants: false,
}
}
@@ -2203,17 +2244,17 @@ impl<'a> EnumBuilder<'a> {
pub type #ident = #repr;
}
],
codegen_depth: enum_codegen_depth,
}
}
EnumVariation::ModuleConsts => {
- let ident = quote::Ident::new(CONSTIFIED_ENUM_MODULE_REPR_NAME);
+ let ident = Term::new(CONSTIFIED_ENUM_MODULE_REPR_NAME, Span::call_site());
let type_definition = quote! {
#( #attrs )*
pub type #ident = #repr;
};
EnumBuilder::ModuleConsts {
codegen_depth: enum_codegen_depth + 1,
module_name: name,
@@ -2226,16 +2267,17 @@ impl<'a> EnumBuilder<'a> {
/// Add a variant to this enum.
fn with_variant<'b>(
self,
ctx: &BindgenContext,
variant: &EnumVariant,
mangling_prefix: Option<&str>,
rust_ty: quote::Tokens,
result: &mut CodegenResult<'b>,
+ is_ty_named: bool,
) -> Self {
let variant_name = ctx.rust_mangle(variant.name());
let expr = match variant.val() {
EnumVariantValue::Signed(v) => helpers::ast_ty::int_expr(v),
EnumVariantValue::Unsigned(v) => helpers::ast_ty::uint_expr(v),
};
let mut doc = quote! {};
@@ -2257,29 +2299,38 @@ impl<'a> EnumBuilder<'a> {
#tokens
#doc
#name = #expr,
},
emitted_any_variants: true,
}
}
- EnumBuilder::Bitfield { .. } => {
- let constant_name = match mangling_prefix {
- Some(prefix) => {
- Cow::Owned(format!("{}_{}", prefix, variant_name))
- }
- None => variant_name,
- };
-
- let ident = ctx.rust_ident(constant_name);
- result.push(quote! {
- #doc
- pub const #ident : #rust_ty = #rust_ty ( #expr );
- });
+ EnumBuilder::Bitfield { canonical_name, .. } => {
+ if ctx.options().rust_features().associated_const && is_ty_named {
+ let enum_ident = ctx.rust_ident(canonical_name);
+ let variant_ident = ctx.rust_ident(variant_name);
+ result.push(quote! {
+ impl #enum_ident {
+ #doc
+ pub const #variant_ident : #rust_ty = #rust_ty ( #expr );
+ }
+ });
+ } else {
+ let ident = ctx.rust_ident(match mangling_prefix {
+ Some(prefix) => {
+ Cow::Owned(format!("{}_{}", prefix, variant_name))
+ }
+ None => variant_name,
+ });
+ result.push(quote! {
+ #doc
+ pub const #ident : #rust_ty = #rust_ty ( #expr );
+ });
+ }
self
}
EnumBuilder::Consts {
..
} => {
let constant_name = match mangling_prefix {
@@ -2456,25 +2507,28 @@ impl CodeGenerator for Enum {
(true, 8) => "i64",
(false, 8) => "u64",
_ => {
warn!("invalid enum decl: signed: {}, size: {}", signed, size);
"i32"
}
};
- let variation = if self.is_bitfield(ctx, item) {
+ // ModuleConsts has higher precedence before Rust in order to avoid problems with
+ // overlapping match patterns
+ let variation = if self.is_constified_enum_module(ctx, item) {
+ EnumVariation::ModuleConsts
+ } else if self.is_bitfield(ctx, item) {
EnumVariation::Bitfield
} else if self.is_rustified_enum(ctx, item) {
EnumVariation::Rust
- } else if self.is_constified_enum_module(ctx, item) {
- EnumVariation::ModuleConsts
+ } else if self.is_constified_enum(ctx, item) {
+ EnumVariation::Consts
} else {
- // We generate consts by default
- EnumVariation::Consts
+ ctx.options().default_enum_style
};
let mut attrs = vec![];
// TODO(emilio): Delegate this to the builders?
if variation.is_rust() {
attrs.push(attributes::repr(repr_name));
} else if variation.is_bitfield() {
@@ -2490,28 +2544,28 @@ impl CodeGenerator for Enum {
&["Debug", "Copy", "Clone", "PartialEq", "Eq", "Hash"],
));
}
fn add_constant<'a>(
ctx: &BindgenContext,
enum_: &Type,
// Only to avoid recomputing every time.
- enum_canonical_name: "e::Ident,
+ enum_canonical_name: &Term,
// May be the same as "variant" if it's because the
// enum is unnamed and we still haven't seen the
// value.
variant_name: &str,
- referenced_name: "e::Ident,
+ referenced_name: &Term,
enum_rust_ty: quote::Tokens,
result: &mut CodegenResult<'a>,
) {
let constant_name = if enum_.name().is_some() {
if ctx.options().prepend_enum_name {
- format!("{}_{}", enum_canonical_name, variant_name)
+ format!("{}_{}", enum_canonical_name.as_str(), variant_name)
} else {
variant_name.into()
}
} else {
variant_name.into()
};
let constant_name = ctx.rust_ident(constant_name);
@@ -2530,17 +2584,17 @@ impl CodeGenerator for Enum {
&name,
attrs,
repr,
variation,
item.codegen_depth(ctx),
);
// A map where we keep a value -> variant relation.
- let mut seen_values = HashMap::<_, quote::Ident>::new();
+ let mut seen_values = HashMap::<_, Term>::new();
let enum_rust_ty = item.to_rust_ty_or_opaque(ctx, &());
let is_toplevel = item.is_toplevel(ctx);
// Used to mangle the constants we generate in the unnamed-enum case.
let parent_canonical_name = if is_toplevel {
None
} else {
Some(item.parent_id().canonical_name(ctx))
@@ -2602,63 +2656,66 @@ impl CodeGenerator for Enum {
);
} else {
builder = builder.with_variant(
ctx,
variant,
constant_mangling_prefix,
enum_rust_ty.clone(),
result,
+ enum_ty.name().is_some(),
);
}
}
Entry::Vacant(entry) => {
builder = builder.with_variant(
ctx,
variant,
constant_mangling_prefix,
enum_rust_ty.clone(),
result,
+ enum_ty.name().is_some(),
);
let variant_name = ctx.rust_ident(variant.name());
// If it's an unnamed enum, or constification is enforced,
// we also generate a constant so it can be properly
// accessed.
if (variation.is_rust() && enum_ty.name().is_none()) ||
variant.force_constification()
{
let mangled_name = if is_toplevel {
variant_name.clone()
} else {
let parent_name =
parent_canonical_name.as_ref().unwrap();
- quote::Ident::new(
- format!(
+ Term::new(
+ &format!(
"{}_{}",
parent_name,
- variant_name
- )
+ variant_name.as_str()
+ ),
+ Span::call_site()
)
};
add_constant(
ctx,
enum_ty,
&ident,
- mangled_name.as_ref(),
+ mangled_name.as_str(),
&variant_name,
enum_rust_ty.clone(),
result,
);
}
- entry.insert(quote::Ident::new(variant_name));
+ entry.insert(variant_name);
}
}
}
let item = builder.build(ctx, enum_rust_ty, result);
result.push(item);
}
}
@@ -2697,19 +2754,18 @@ trait TryToOpaque {
/// last resort, because C++ does not permit zero-sized types. See the note in
/// the `ToRustTyOrOpaque` doc comment about fallible versus infallible traits
/// and when each is appropriate.
///
/// Don't implement this directly. Instead implement `TryToOpaque`, and then
/// leverage the blanket impl for this trait.
trait ToOpaque: TryToOpaque {
fn get_layout(&self, ctx: &BindgenContext, extra: &Self::Extra) -> Layout {
- self.try_get_layout(ctx, extra).unwrap_or_else(
- |_| Layout::for_size(1),
- )
+ self.try_get_layout(ctx, extra)
+ .unwrap_or_else(|_| Layout::for_size(ctx, 1))
}
fn to_opaque(
&self,
ctx: &BindgenContext,
extra: &Self::Extra,
) -> quote::Tokens {
let layout = self.get_layout(ctx, extra);
@@ -2946,17 +3002,17 @@ impl TryToRustTy for Type {
Ok(quote! { [u64; 2] })
}
}
}
TypeKind::Float(fk) => Ok(float_kind_rust_type(ctx, fk)),
TypeKind::Complex(fk) => {
let float_path = float_kind_rust_type(ctx, fk);
- ctx.generated_bindegen_complex();
+ ctx.generated_bindgen_complex();
Ok(if ctx.options().enable_cxx_namespaces {
quote! {
root::__BindgenComplex<#float_path>
}
} else {
quote! {
__BindgenComplex<#float_path>
}
@@ -2976,29 +3032,27 @@ impl TryToRustTy for Type {
}
TypeKind::Array(item, len) => {
let ty = item.try_to_rust_ty(ctx, &())?;
Ok(quote! {
[ #ty ; #len ]
})
}
TypeKind::Enum(..) => {
- let mut tokens = quote! {};
let path = item.namespace_aware_canonical_path(ctx);
- tokens.append_separated(path.into_iter().map(quote::Ident::new), "::");
- Ok(tokens)
+ let path = Term::new(&path.join("::"), Span::call_site());
+ Ok(quote!(#path))
}
TypeKind::TemplateInstantiation(ref inst) => {
inst.try_to_rust_ty(ctx, item)
}
TypeKind::ResolvedTypeRef(inner) => inner.try_to_rust_ty(ctx, &()),
TypeKind::TemplateAlias(..) |
TypeKind::Alias(..) => {
let template_params = item.used_template_params(ctx)
- .unwrap_or(vec![])
.into_iter()
.filter(|param| param.is_template_param(ctx, &()))
.collect::<Vec<_>>();
let spelling = self.name().expect("Unnamed alias?");
if item.is_opaque(ctx, &()) && !template_params.is_empty() {
self.try_to_opaque(ctx, item)
} else if let Some(ty) = utils::type_from_named(
@@ -3007,36 +3061,36 @@ impl TryToRustTy for Type {
)
{
Ok(ty)
} else {
utils::build_path(item, ctx)
}
}
TypeKind::Comp(ref info) => {
- let template_params = item.used_template_params(ctx);
+ let template_params = item.all_template_params(ctx);
if info.has_non_type_template_params() ||
- (item.is_opaque(ctx, &()) && template_params.is_some())
+ (item.is_opaque(ctx, &()) && !template_params.is_empty())
{
return self.try_to_opaque(ctx, item);
}
utils::build_path(item, ctx)
}
TypeKind::Opaque => self.try_to_opaque(ctx, item),
TypeKind::BlockPointer => {
let void = raw_type(ctx, "c_void");
Ok(void.to_ptr(
/* is_const = */
false
))
}
TypeKind::Pointer(inner) |
TypeKind::Reference(inner) => {
- let is_const = self.is_const() || ctx.resolve_type(inner).is_const();
+ let is_const = ctx.resolve_type(inner).is_const();
let inner = inner.into_resolver().through_type_refs().resolve(ctx);
let inner_ty = inner.expect_type();
// Regardless if we can properly represent the inner type, we
// should always generate a proper pointer here, so use
// infallible conversion of the inner type.
let mut ty = inner.to_rust_ty_or_opaque(ctx, &());
@@ -3101,30 +3155,28 @@ impl TryToRustTy for TemplateInstantiati
let def = self.template_definition()
.into_resolver()
.through_type_refs()
.resolve(ctx);
let mut ty = quote! {};
let def_path = def.namespace_aware_canonical_path(ctx);
- ty.append_separated(def_path.into_iter().map(|p| ctx.rust_ident(p)), "::");
-
- let def_params = match def.self_template_params(ctx) {
- Some(params) => params,
- None => {
- // This can happen if we generated an opaque type for a partial
- // template specialization, and we've hit an instantiation of
- // that partial specialization.
- extra_assert!(
- def.is_opaque(ctx, &())
- );
- return Err(error::Error::InstantiationOfOpaqueType);
- }
- };
+ ty.append_separated(def_path.into_iter().map(|p| ctx.rust_ident(p)), Term::new("::", Span::call_site()));
+
+ let def_params = def.self_template_params(ctx);
+ if def_params.is_empty() {
+ // This can happen if we generated an opaque type for a partial
+ // template specialization, and we've hit an instantiation of
+ // that partial specialization.
+ extra_assert!(
+ def.is_opaque(ctx, &())
+ );
+ return Err(error::Error::InstantiationOfOpaqueType);
+ }
// TODO: If the definition type is a template class/struct
// definition's member template definition, it could rely on
// generic template parameters from its outer template
// class/struct. When we emit bindings for it, it could require
// *more* type arguments than we have here, and we will need to
// reconstruct them somehow. We don't have any means of doing
// that reconstruction at this time.
@@ -3162,17 +3214,17 @@ impl TryToRustTy for FunctionSig {
_: &(),
) -> error::Result<quote::Tokens> {
// TODO: we might want to consider ignoring the reference return value.
let ret = utils::fnsig_return_ty(ctx, &self);
let arguments = utils::fnsig_arguments(ctx, &self);
let abi = self.abi();
match abi {
- Abi::ThisCall if !ctx.options().rust_features().thiscall_abi() => {
+ Abi::ThisCall if !ctx.options().rust_features().thiscall_abi => {
warn!("Skipping function with thiscall ABI that isn't supported by the configured Rust target");
Ok(quote::Tokens::new())
}
_ => {
Ok(quote! {
unsafe extern #abi fn ( #( #arguments ),* ) #ret
})
}
@@ -3207,21 +3259,18 @@ impl CodeGenerator for Function {
}
_ => {},
}
// Similar to static member variables in a class template, we can't
// generate bindings to template functions, because the set of
// instantiations is open ended and we have no way of knowing which
// monomorphizations actually exist.
- let type_params = item.all_template_params(ctx);
- if let Some(params) = type_params {
- if !params.is_empty() {
- return;
- }
+ if !item.all_template_params(ctx).is_empty() {
+ return;
}
let name = self.name();
let mut canonical_name = item.canonical_name(ctx);
let mangled_name = self.mangled_name();
{
let seen_symbol_name = mangled_name.unwrap_or(&canonical_name);
@@ -3259,17 +3308,17 @@ impl CodeGenerator for Function {
// Handle overloaded functions by giving each overload its own unique
// suffix.
let times_seen = result.overload_number(&canonical_name);
if times_seen > 0 {
write!(&mut canonical_name, "{}", times_seen).unwrap();
}
let abi = match signature.abi() {
- Abi::ThisCall if !ctx.options().rust_features().thiscall_abi() => {
+ Abi::ThisCall if !ctx.options().rust_features().thiscall_abi => {
warn!("Skipping function with thiscall ABI that isn't supported by the configured Rust target");
return;
}
Abi::Unknown(unknown_abi) => {
panic!(
"Invalid or unknown abi {:?} for function {:?} ({:?})",
unknown_abi,
canonical_name,
@@ -3314,17 +3363,17 @@ fn objc_method_codegen(
};
let methods_and_args = method.format_method_call(&fn_args);
let body = if method.is_class_method() {
let class_name = class_name
.expect("Generating a class method without class name?")
.to_owned();
- let expect_msg = format!("Couldn't find {}", class_name);
+ let expect_msg = proc_macro2::Literal::string(&format!("Couldn't find {}", class_name));
quote! {
msg_send!(objc::runtime::Class::get(#class_name).expect(#expect_msg), #methods_and_args)
}
} else {
quote! {
msg_send!(self, #methods_and_args)
}
};
@@ -3440,21 +3489,22 @@ pub(crate) fn codegen(context: BindgenCo
mod utils {
use super::{ToRustTyOrOpaque, error};
use ir::context::BindgenContext;
use ir::function::FunctionSig;
use ir::item::{Item, ItemCanonicalPath};
use ir::ty::TypeKind;
use quote;
+ use proc_macro2::{Term, Span};
use std::mem;
pub fn prepend_bitfield_unit_type(result: &mut Vec<quote::Tokens>) {
- let mut bitfield_unit_type = quote! {};
- bitfield_unit_type.append(include_str!("./bitfield_unit.rs"));
+ let bitfield_unit_type = Term::new(include_str!("./bitfield_unit.rs"), Span::call_site());
+ let bitfield_unit_type = quote!(#bitfield_unit_type);
let items = vec![bitfield_unit_type];
let old_items = mem::replace(result, items);
result.extend(old_items);
}
pub fn prepend_objc_header(
ctx: &BindgenContext,
@@ -3670,20 +3720,22 @@ mod utils {
let old_items = mem::replace(result, items);
result.extend(old_items.into_iter());
}
pub fn build_path(
item: &Item,
ctx: &BindgenContext,
) -> error::Result<quote::Tokens> {
+ use proc_macro2::{Term, Span};
+
let path = item.namespace_aware_canonical_path(ctx);
-
- let mut tokens = quote! {};
- tokens.append_separated(path.into_iter().map(quote::Ident::new), "::");
+ let path = Term::new(&path.join("::"), Span::call_site());
+ let tokens = quote! {#path};
+ //tokens.append_separated(path, "::");
Ok(tokens)
}
fn primitive_ty(ctx: &BindgenContext, name: &str) -> quote::Tokens {
let ident = ctx.rust_ident_raw(name);
quote! {
#ident
--- a/third_party/rust/bindgen/src/codegen/struct_layout.rs
+++ b/third_party/rust/bindgen/src/codegen/struct_layout.rs
@@ -2,18 +2,18 @@
use super::helpers;
use ir::comp::CompInfo;
use ir::context::BindgenContext;
use ir::layout::Layout;
use ir::ty::{Type, TypeKind};
use quote;
+use proc_macro2::{Term, Span};
use std::cmp;
-use std::mem;
/// Trace the layout of struct.
#[derive(Debug)]
pub struct StructLayoutTracker<'a> {
name: &'a str,
ctx: &'a BindgenContext,
comp: &'a CompInfo,
is_packed: bool,
@@ -96,17 +96,17 @@ impl<'a> StructLayoutTracker<'a> {
max_field_align: 0,
last_field_was_bitfield: false,
}
}
pub fn saw_vtable(&mut self) {
debug!("saw vtable for {}", self.name);
- let ptr_size = mem::size_of::<*mut ()>();
+ let ptr_size = self.ctx.target_pointer_size();
self.latest_offset += ptr_size;
self.latest_field_layout = Some(Layout::new(ptr_size, ptr_size));
self.max_field_align = ptr_size;
}
pub fn saw_base(&mut self, base_ty: &Type) {
debug!("saw base for {}", self.name);
if let Some(layout) = base_ty.layout(self.ctx) {
@@ -160,25 +160,23 @@ impl<'a> StructLayoutTracker<'a> {
if let TypeKind::Array(inner, len) =
*field_ty.canonical_type(self.ctx).kind()
{
// FIXME(emilio): As an _ultra_ hack, we correct the layout returned
// by arrays of structs that have a bigger alignment than what we
// can support.
//
// This means that the structs in the array are super-unsafe to
- // access, since they won't be properly aligned, but *shrug*.
- if let Some(layout) = self.ctx.resolve_type(inner).layout(
- self.ctx,
- )
- {
- if layout.align > mem::size_of::<*mut ()>() {
- field_layout.size = align_to(layout.size, layout.align) *
- len;
- field_layout.align = mem::size_of::<*mut ()>();
+ // access, since they won't be properly aligned, but there's not too
+ // much we can do about it.
+ if let Some(layout) = self.ctx.resolve_type(inner).layout(self.ctx) {
+ if layout.align > self.ctx.target_pointer_size() {
+ field_layout.size =
+ align_to(layout.size, layout.align) * len;
+ field_layout.align = self.ctx.target_pointer_size();
}
}
}
let will_merge_with_bitfield = self.align_to_latest_field(field_layout);
let padding_layout = if self.is_packed {
None
@@ -188,17 +186,17 @@ impl<'a> StructLayoutTracker<'a> {
offset / 8 - self.latest_offset
}
_ if will_merge_with_bitfield || field_layout.align == 0 => 0,
_ => self.padding_bytes(field_layout),
};
// Otherwise the padding is useless.
let need_padding = padding_bytes >= field_layout.align ||
- field_layout.align > mem::size_of::<*mut ()>();
+ field_layout.align > self.ctx.target_pointer_size();
self.latest_offset += padding_bytes;
debug!(
"Offset: <padding>: {} -> {}",
self.latest_offset - padding_bytes,
self.latest_offset
);
@@ -210,17 +208,17 @@ impl<'a> StructLayoutTracker<'a> {
field_offset.unwrap_or(0) / 8,
padding_bytes,
field_layout
);
if need_padding && padding_bytes != 0 {
Some(Layout::new(
padding_bytes,
- cmp::min(field_layout.align, mem::size_of::<*mut ()>()),
+ cmp::min(field_layout.align, self.ctx.target_pointer_size())
))
} else {
None
}
};
self.latest_offset += field_layout.size;
self.latest_field_layout = Some(field_layout);
@@ -262,53 +260,63 @@ impl<'a> StructLayoutTracker<'a> {
// Note that if the last field we saw was a bitfield, we may need to pad
// regardless, because bitfields don't respect alignment as strictly as
// other fields.
if padding_bytes > 0 &&
(padding_bytes >= layout.align ||
(self.last_field_was_bitfield &&
padding_bytes >=
self.latest_field_layout.unwrap().align) ||
- layout.align > mem::size_of::<*mut ()>())
+ layout.align > self.ctx.target_pointer_size())
{
let layout = if self.is_packed {
Layout::new(padding_bytes, 1)
} else if self.last_field_was_bitfield ||
- layout.align > mem::size_of::<*mut ()>()
+ layout.align > self.ctx.target_pointer_size()
{
// We've already given up on alignment here.
- Layout::for_size(padding_bytes)
+ Layout::for_size(self.ctx, padding_bytes)
} else {
Layout::new(padding_bytes, layout.align)
};
debug!("pad bytes to struct {}, {:?}", self.name, layout);
Some(self.padding_field(layout))
} else {
None
}
}
pub fn requires_explicit_align(&self, layout: Layout) -> bool {
- self.max_field_align < layout.align &&
- layout.align <= mem::size_of::<*mut ()>()
+ if self.max_field_align >= layout.align {
+ return false;
+ }
+        // At this point we require explicit alignment, but we may not be able
+        // to generate the right bits; let's double-check.
+ if self.ctx.options().rust_features().repr_align {
+ return true;
+ }
+
+        // We can only generate up to a word of alignment unless we support
+        // repr(align).
+ layout.align <= self.ctx.target_pointer_size()
}
fn padding_bytes(&self, layout: Layout) -> usize {
align_to(self.latest_offset, layout.align) - self.latest_offset
}
fn padding_field(&mut self, layout: Layout) -> quote::Tokens {
let ty = helpers::blob(layout);
let padding_count = self.padding_count;
self.padding_count += 1;
- let padding_field_name = quote::Ident::new(format!("__bindgen_padding_{}", padding_count));
+ let padding_field_name = Term::new(&format!("__bindgen_padding_{}", padding_count), Span::call_site());
self.max_field_align = cmp::max(self.max_field_align, layout.align);
quote! {
pub #padding_field_name : #ty ,
}
}
--- a/third_party/rust/bindgen/src/features.rs
+++ b/third_party/rust/bindgen/src/features.rs
@@ -85,18 +85,22 @@ macro_rules! rust_target_values_def {
/// Defines macro which takes a macro
macro_rules! rust_target_base {
( $x_macro:ident ) => {
$x_macro!(
/// Rust stable 1.0
=> Stable_1_0 => 1.0;
/// Rust stable 1.19
=> Stable_1_19 => 1.19;
+ /// Rust stable 1.20
+ => Stable_1_20 => 1.20;
/// Rust stable 1.21
=> Stable_1_21 => 1.21;
+ /// Rust stable 1.25
+ => Stable_1_25 => 1.25;
/// Nightly rust
=> Nightly => nightly;
);
}
}
rust_target_base!(rust_target_def);
rust_target_base!(rust_target_values_def);
@@ -106,63 +110,69 @@ pub const LATEST_STABLE_RUST: RustTarget
/// Create RustFeatures struct definition, new(), and a getter for each field
macro_rules! rust_feature_def {
( $( $( #[$attr:meta] )* => $feature:ident; )* ) => {
/// Features supported by a rust target
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub struct RustFeatures {
$(
- $feature: bool,
+ $(
+ #[$attr]
+ )*
+ pub $feature: bool,
)*
}
impl RustFeatures {
/// Gives a RustFeatures struct with all features disabled
fn new() -> Self {
RustFeatures {
$(
$feature: false,
)*
}
}
-
- $(
- $(
- #[$attr]
- )*
- pub fn $feature(&self) -> bool {
- self.$feature
- }
- )*
}
}
}
rust_feature_def!(
/// Untagged unions ([RFC 1444](https://github.com/rust-lang/rfcs/blob/master/text/1444-union.md))
=> untagged_union;
/// `thiscall` calling convention ([Tracking issue](https://github.com/rust-lang/rust/issues/42202))
=> thiscall_abi;
/// builtin impls for `Clone` ([PR](https://github.com/rust-lang/rust/pull/43690))
=> builtin_clone_impls;
+    /// repr(align) ([PR](https://github.com/rust-lang/rust/pull/47006))
+    => repr_align;
+    /// associated constants ([Tracking issue](https://github.com/rust-lang/rust/issues/29646))
+    => associated_const;
);
impl From<RustTarget> for RustFeatures {
fn from(rust_target: RustTarget) -> Self {
let mut features = RustFeatures::new();
if rust_target >= RustTarget::Stable_1_19 {
features.untagged_union = true;
}
+ if rust_target >= RustTarget::Stable_1_20 {
+ features.associated_const = true;
+ }
+
if rust_target >= RustTarget::Stable_1_21 {
features.builtin_clone_impls = true;
}
+ if rust_target >= RustTarget::Stable_1_25 {
+ features.repr_align = true;
+ }
+
if rust_target >= RustTarget::Nightly {
features.thiscall_abi = true;
}
features
}
}
@@ -184,11 +194,12 @@ mod test {
assert_eq!(target, RustTarget::from_str(target_str).unwrap());
}
#[test]
fn str_to_target() {
test_target("1.0", RustTarget::Stable_1_0);
test_target("1.19", RustTarget::Stable_1_19);
test_target("1.21", RustTarget::Stable_1_21);
+ test_target("1.25", RustTarget::Stable_1_25);
test_target("nightly", RustTarget::Nightly);
}
}
--- a/third_party/rust/bindgen/src/ir/analysis/derive_copy.rs
+++ b/third_party/rust/bindgen/src/ir/analysis/derive_copy.rs
@@ -229,30 +229,30 @@ impl<'ctx> MonotoneFramework for CannotD
// default, the may have an explicit destructor in C++, so we can't
// defer this check just for the union case.
if self.ctx.lookup_has_destructor(id.expect_type_id(self.ctx)) {
trace!(" comp has destructor which cannot derive copy");
return self.insert(id);
}
if info.kind() == CompKind::Union {
- if !self.ctx.options().rust_features().untagged_union() {
+ if !self.ctx.options().rust_features().untagged_union {
// NOTE: If there's no template parameters we can derive
// copy unconditionally, since arrays are magical for
// rustc, and __BindgenUnionField always implements
// copy.
trace!(
" comp can always derive debug if it's a Union and no template parameters"
);
return ConstrainResult::Same;
}
// https://github.com/rust-lang/rust/issues/36640
- if info.self_template_params(self.ctx).is_some() ||
- item.used_template_params(self.ctx).is_some()
+ if !info.self_template_params(self.ctx).is_empty() ||
+ !item.all_template_params(self.ctx).is_empty()
{
trace!(
" comp cannot derive copy because issue 36640"
);
return self.insert(id);
}
}
--- a/third_party/rust/bindgen/src/ir/analysis/derive_debug.rs
+++ b/third_party/rust/bindgen/src/ir/analysis/derive_debug.rs
@@ -145,17 +145,17 @@ impl<'ctx> MonotoneFramework for CannotD
};
if item.is_opaque(self.ctx, &()) {
let layout_can_derive = ty.layout(self.ctx).map_or(true, |l| {
l.opaque().can_trivially_derive_debug()
});
return if layout_can_derive &&
!(ty.is_union() &&
- self.ctx.options().rust_features().untagged_union()) {
+ self.ctx.options().rust_features().untagged_union) {
trace!(" we can trivially derive Debug for the layout");
ConstrainResult::Same
} else {
trace!(" we cannot derive Debug for the layout");
self.insert(id)
};
}
@@ -230,17 +230,17 @@ impl<'ctx> MonotoneFramework for CannotD
TypeKind::Comp(ref info) => {
assert!(
!info.has_non_type_template_params(),
"The early ty.is_opaque check should have handled this case"
);
if info.kind() == CompKind::Union {
- if self.ctx.options().rust_features().untagged_union() {
+ if self.ctx.options().rust_features().untagged_union {
trace!(" cannot derive Debug for Rust unions");
return self.insert(id);
}
if ty.layout(self.ctx).map_or(true, |l| {
l.opaque().can_trivially_derive_debug()
})
{
--- a/third_party/rust/bindgen/src/ir/analysis/derive_default.rs
+++ b/third_party/rust/bindgen/src/ir/analysis/derive_default.rs
@@ -172,17 +172,17 @@ impl<'ctx> MonotoneFramework for CannotD
};
if item.is_opaque(self.ctx, &()) {
let layout_can_derive = ty.layout(self.ctx).map_or(true, |l| {
l.opaque().can_trivially_derive_default()
});
return if layout_can_derive &&
!(ty.is_union() &&
- self.ctx.options().rust_features().untagged_union()) {
+ self.ctx.options().rust_features().untagged_union) {
trace!(" we can trivially derive Default for the layout");
ConstrainResult::Same
} else {
trace!(" we cannot derive Default for the layout");
self.insert(id)
};
}
@@ -266,17 +266,17 @@ impl<'ctx> MonotoneFramework for CannotD
);
if info.is_forward_declaration() {
trace!(" cannot derive Default for forward decls");
return self.insert(id);
}
if info.kind() == CompKind::Union {
- if self.ctx.options().rust_features().untagged_union() {
+ if self.ctx.options().rust_features().untagged_union {
trace!(" cannot derive Default for Rust unions");
return self.insert(id);
}
if ty.layout(self.ctx).map_or(true, |l| {
l.opaque().can_trivially_derive_default()
})
{
--- a/third_party/rust/bindgen/src/ir/analysis/derive_hash.rs
+++ b/third_party/rust/bindgen/src/ir/analysis/derive_hash.rs
@@ -132,17 +132,17 @@ impl<'ctx> MonotoneFramework for CannotD
}
if item.is_opaque(self.ctx, &()) {
let layout_can_derive = ty.layout(self.ctx).map_or(true, |l| {
l.opaque().can_trivially_derive_hash()
});
return if layout_can_derive &&
!(ty.is_union() &&
- self.ctx.options().rust_features().untagged_union()) {
+ self.ctx.options().rust_features().untagged_union) {
trace!(" we can trivially derive Hash for the layout");
ConstrainResult::Same
} else {
trace!(" we cannot derive Hash for the layout");
self.insert(id)
};
}
@@ -252,17 +252,17 @@ impl<'ctx> MonotoneFramework for CannotD
);
if info.is_forward_declaration() {
trace!(" cannot derive Hash for forward decls");
return self.insert(id);
}
if info.kind() == CompKind::Union {
- if self.ctx.options().rust_features().untagged_union() {
+ if self.ctx.options().rust_features().untagged_union {
trace!(" cannot derive Hash for Rust unions");
return self.insert(id);
}
if ty.layout(self.ctx).map_or(true, |l| {
l.opaque().can_trivially_derive_hash()
})
{
--- a/third_party/rust/bindgen/src/ir/analysis/derive_partialeq_or_partialord.rs
+++ b/third_party/rust/bindgen/src/ir/analysis/derive_partialeq_or_partialord.rs
@@ -114,17 +114,17 @@ impl<'ctx> CannotDerivePartialEqOrPartia
if self.ctx.no_partialeq_by_name(&item) {
return CanDerive::No;
}
trace!("ty: {:?}", ty);
if item.is_opaque(self.ctx, &()) {
if ty.is_union()
- && self.ctx.options().rust_features().untagged_union()
+ && self.ctx.options().rust_features().untagged_union
{
trace!(
" cannot derive `PartialEq`/`PartialOrd` for Rust unions"
);
return CanDerive::No;
}
let layout_can_derive = ty.layout(self.ctx)
@@ -237,17 +237,17 @@ impl<'ctx> CannotDerivePartialEqOrPartia
);
if info.is_forward_declaration() {
trace!(" cannot derive for forward decls");
return CanDerive::No;
}
if info.kind() == CompKind::Union {
- if self.ctx.options().rust_features().untagged_union() {
+ if self.ctx.options().rust_features().untagged_union {
trace!(
" cannot derive `PartialEq`/`PartialOrd` for Rust unions"
);
return CanDerive::No;
}
let layout_can_derive =
ty.layout(self.ctx).map_or(CanDerive::Yes, |l| {
--- a/third_party/rust/bindgen/src/ir/analysis/template_params.rs
+++ b/third_party/rust/bindgen/src/ir/analysis/template_params.rs
@@ -270,17 +270,17 @@ impl<'ctx> UsedTemplateParameters<'ctx>
used_by_this_id: &mut ItemSet,
instantiation: &TemplateInstantiation,
) {
trace!(" template instantiation");
let decl = self.ctx.resolve_type(instantiation.template_definition());
let args = instantiation.template_arguments();
- let params = decl.self_template_params(self.ctx).unwrap_or(vec![]);
+ let params = decl.self_template_params(self.ctx);
debug_assert!(this_id != instantiation.template_definition());
let used_by_def = self.used
.get(&instantiation.template_definition().into())
.expect("Should have a used entry for instantiation's template definition")
.as_ref()
.expect("And it should be Some because only this_id's set is None, and an \
instantiation's template definition should never be the \
@@ -414,17 +414,17 @@ impl<'ctx> MonotoneFramework for UsedTem
&TypeKind::TemplateInstantiation(ref inst) => {
let decl = ctx.resolve_type(inst.template_definition());
let args = inst.template_arguments();
// Although template definitions should always have
// template parameters, there is a single exception:
// opaque templates. Hence the unwrap_or.
let params =
- decl.self_template_params(ctx).unwrap_or(vec![]);
+ decl.self_template_params(ctx);
for (arg, param) in args.iter().zip(params.iter()) {
let arg = arg.into_resolver()
.through_type_aliases()
.through_type_refs()
.resolve(ctx)
.id();
--- a/third_party/rust/bindgen/src/ir/comment.rs
+++ b/third_party/rust/bindgen/src/ir/comment.rs
@@ -34,17 +34,17 @@ fn kind(comment: &str) -> Option<Kind> {
}
fn make_indent(indent: usize) -> String {
const RUST_INDENTATION: usize = 4;
iter::repeat(' ').take(indent * RUST_INDENTATION).collect()
}
-/// Preprocesses mulitple single line comments.
+/// Preprocesses multiple single line comments.
///
/// Handles lines starting with both `//` and `///`.
fn preprocess_single_lines(comment: &str, indent: usize) -> String {
debug_assert!(comment.starts_with("//"), "comment is not single line");
let indent = make_indent(indent);
let mut is_first = true;
let lines: Vec<_> = comment
--- a/third_party/rust/bindgen/src/ir/comp.rs
+++ b/third_party/rust/bindgen/src/ir/comp.rs
@@ -1285,19 +1285,20 @@ impl CompInfo {
//
// [1]: https://github.com/rust-lang-nursery/rust-bindgen/issues/482
let is_inner_struct = cur.semantic_parent() == cursor ||
cur.is_definition();
if !is_inner_struct {
return CXChildVisit_Continue;
}
+ // Even if this is a definition, we may not be the semantic
+ // parent, see #1281.
let inner = Item::parse(cur, Some(potential_id), ctx)
.expect("Inner ClassDecl");
- assert_eq!(ctx.resolve_item(inner).parent_id(), potential_id);
let inner = inner.expect_type_id(ctx);
ci.inner_types.push(inner);
// A declaration of an union or a struct without name could
// also be an unnamed field, unfortunately.
if cur.spelling().is_empty() &&
@@ -1350,17 +1351,17 @@ impl CompInfo {
// This used to not be here, but then I tried generating
// stylo bindings with this (without path filters), and
// cried a lot with a method in gfx/Point.h
// (ToUnknownPoint), that somehow was causing the same type
// to be inserted in the map two times.
//
// I couldn't make a reduced test case, but anyway...
- // Methods of template functions not only use to be inlined,
+ // Methods of template functions not only used to be inlined,
// but also instantiated, and we wouldn't be able to call
// them, so just bail out.
if !ci.template_params.is_empty() {
return CXChildVisit_Continue;
}
// NB: This gets us an owned `Function`, not a
// `FunctionSig`.
@@ -1538,17 +1539,17 @@ impl CompInfo {
}
/// Returns whether the current union can be represented as a Rust `union`
///
/// Requirements:
/// 1. Current RustTarget allows for `untagged_union`
/// 2. Each field can derive `Copy`
pub fn can_be_rust_union(&self, ctx: &BindgenContext) -> bool {
- if !ctx.options().rust_features().untagged_union() {
+ if !ctx.options().rust_features().untagged_union {
return false;
}
if self.is_forward_declaration() {
return false;
}
self.fields().iter().all(|f| match *f {
@@ -1663,34 +1664,29 @@ impl IsOpaque for CompInfo {
false
}
}
impl TemplateParameters for CompInfo {
fn self_template_params(
&self,
_ctx: &BindgenContext,
- ) -> Option<Vec<TypeId>> {
- if self.template_params.is_empty() {
- None
- } else {
- Some(self.template_params.clone())
- }
+ ) -> Vec<TypeId> {
+ self.template_params.clone()
}
}
impl Trace for CompInfo {
type Extra = Item;
fn trace<T>(&self, context: &BindgenContext, tracer: &mut T, item: &Item)
where
T: Tracer,
{
- let params = item.all_template_params(context).unwrap_or(vec![]);
- for p in params {
+ for p in item.all_template_params(context) {
tracer.visit_kind(p.into(), EdgeKind::TemplateParameterDefinition);
}
for ty in self.inner_types() {
tracer.visit_kind(ty.into(), EdgeKind::InnerType);
}
for &var in self.inner_vars() {
--- a/third_party/rust/bindgen/src/ir/context.rs
+++ b/third_party/rust/bindgen/src/ir/context.rs
@@ -19,17 +19,17 @@ use super::ty::{FloatKind, Type, TypeKin
use super::function::Function;
use super::super::time::Timer;
use BindgenOptions;
use callbacks::ParseCallbacks;
use cexpr;
use clang::{self, Cursor};
use clang_sys;
use parse::ClangItemParser;
-use quote;
+use proc_macro2::{Term, Span};
use std::borrow::Cow;
use std::cell::Cell;
use std::collections::{HashMap, HashSet, hash_map};
use std::collections::btree_map::{self, BTreeMap};
use std::iter::IntoIterator;
use std::mem;
/// An identifier for some kind of IR item.
@@ -361,21 +361,24 @@ pub struct BindgenContext {
in_codegen: bool,
/// The clang index for parsing.
index: clang::Index,
/// The translation unit for parsing.
translation_unit: clang::TranslationUnit,
+ /// Target information that can be useful for some stuff.
+ target_info: Option<clang::TargetInfo>,
+
/// The options given by the user via cli or other medium.
options: BindgenOptions,
/// Whether a bindgen complex was generated
- generated_bindegen_complex: Cell<bool>,
+ generated_bindgen_complex: Cell<bool>,
/// The set of `ItemId`s that are whitelisted. This the very first thing
/// computed after parsing our IR, and before running any of our analyses.
whitelisted: Option<ItemSet>,
/// The set of `ItemId`s that are whitelisted for code generation _and_ that
/// we should generate accounting for the codegen options.
///
@@ -498,16 +501,19 @@ impl<'ctx> WhitelistedItemsTraversal<'ct
{
WhitelistedItemsTraversal {
ctx,
traversal: ItemTraversal::new(ctx, roots, predicate),
}
}
}
+const HOST_TARGET: &'static str =
+ include_str!(concat!(env!("OUT_DIR"), "/host-target.txt"));
+
/// Returns the effective target, and whether it was explicitly specified on the
/// clang flags.
fn find_effective_target(clang_args: &[String]) -> (String, bool) {
use std::env;
for opt in clang_args {
if opt.starts_with("--target=") {
let mut split = opt.split('=');
@@ -516,18 +522,16 @@ fn find_effective_target(clang_args: &[S
}
}
// If we're running from a build script, try to find the cargo target.
if let Ok(t) = env::var("TARGET") {
return (t, false)
}
- const HOST_TARGET: &'static str =
- include_str!(concat!(env!("OUT_DIR"), "/host-target.txt"));
(HOST_TARGET.to_owned(), false)
}
impl BindgenContext {
/// Construct the context for the given `options`.
pub(crate) fn new(options: BindgenOptions) -> Self {
use clang_sys;
@@ -556,16 +560,27 @@ impl BindgenContext {
&index,
"",
&clang_args,
&options.input_unsaved_files,
parse_options,
).expect("TranslationUnit::parse failed")
};
+ let target_info = clang::TargetInfo::new(&translation_unit);
+
+ #[cfg(debug_assertions)]
+ {
+ if let Some(ref ti) = target_info {
+ if effective_target == HOST_TARGET {
+ assert_eq!(ti.pointer_width / 8, mem::size_of::<*mut ()>());
+ }
+ }
+ }
+
let root_module = Self::build_root_module(ItemId(0));
let root_module_id = root_module.id().as_module_id_unchecked();
let mut me = BindgenContext {
items: Default::default(),
types: Default::default(),
type_params: Default::default(),
modules: Default::default(),
@@ -573,20 +588,21 @@ impl BindgenContext {
root_module: root_module_id,
current_module: root_module_id,
semantic_parents: Default::default(),
currently_parsed_types: vec![],
parsed_macros: Default::default(),
replacements: Default::default(),
collected_typerefs: false,
in_codegen: false,
- index: index,
- translation_unit: translation_unit,
- options: options,
- generated_bindegen_complex: Cell::new(false),
+ index,
+ translation_unit,
+ target_info,
+ options,
+ generated_bindgen_complex: Cell::new(false),
whitelisted: None,
codegen_items: None,
used_template_parameters: None,
need_bitfield_allocation: Default::default(),
cannot_derive_debug: None,
cannot_derive_default: None,
cannot_derive_copy: None,
cannot_derive_copy_in_array: None,
@@ -606,16 +622,25 @@ impl BindgenContext {
/// Creates a timer for the current bindgen phase. If time_phases is `true`,
/// the timer will print to stderr when it is dropped, otherwise it will do
/// nothing.
pub fn timer<'a>(&self, name: &'a str) -> Timer<'a> {
Timer::new(name).with_output(self.options.time_phases)
}
+ /// Returns the pointer width to use for the target for the current
+ /// translation.
+ pub fn target_pointer_size(&self) -> usize {
+ if let Some(ref ti) = self.target_info {
+ return ti.pointer_width / 8;
+ }
+ mem::size_of::<*mut ()>()
+ }
+
/// Get the stack of partially parsed types that we are in the middle of
/// parsing.
pub fn currently_parsed_types(&self) -> &[PartialType] {
&self.currently_parsed_types[..]
}
/// Begin parsing the given partial type, and push it onto the
/// `currently_parsed_types` stack so that we won't infinite recurse if we
@@ -875,29 +900,29 @@ impl BindgenContext {
s = s.replace("$", "_");
s.push_str("_");
return Cow::Owned(s);
}
Cow::Borrowed(name)
}
/// Returns a mangled name as a rust identifier.
- pub fn rust_ident<S>(&self, name: S) -> quote::Ident
+ pub fn rust_ident<S>(&self, name: S) -> Term
where
S: AsRef<str>
{
self.rust_ident_raw(self.rust_mangle(name.as_ref()))
}
/// Returns a mangled name as a rust identifier.
- pub fn rust_ident_raw<T>(&self, name: T) -> quote::Ident
+ pub fn rust_ident_raw<T>(&self, name: T) -> Term
where
- T: Into<quote::Ident>
+ T: AsRef<str>
{
- name.into()
+ Term::new(name.as_ref(), Span::call_site())
}
/// Iterate over all items that have been defined.
pub fn items<'a>(&'a self) -> btree_map::Iter<'a, ItemId, Item> {
self.items.iter()
}
/// Have we collected all unresolved type references yet?
@@ -1336,20 +1361,17 @@ impl BindgenContext {
let used_params = analyze::<UsedTemplateParameters>(self);
self.used_template_parameters = Some(used_params);
} else {
// If you aren't recursively whitelisting, then we can't really make
// any sense of template parameter usage, and you're on your own.
let mut used_params = HashMap::new();
for &id in self.whitelisted_items() {
used_params.entry(id).or_insert(
- id.self_template_params(self).map_or(
- Default::default(),
- |params| params.into_iter().map(|p| p.into()).collect(),
- ),
+ id.self_template_params(self).into_iter().map(|p| p.into()).collect()
);
}
self.used_template_parameters = Some(used_params);
}
}
/// Return `true` if `item` uses the given `template_param`, `false`
/// otherwise.
@@ -1511,36 +1533,37 @@ impl BindgenContext {
/// Given a cursor pointing to the location of a template instantiation,
/// return a tuple of the form `(declaration_cursor, declaration_id,
/// num_expected_template_args)`.
///
/// Note that `declaration_id` is not guaranteed to be in the context's item
/// set! It is possible that it is a partial type that we are still in the
- /// middle of parsign.
+ /// middle of parsing.
fn get_declaration_info_for_template_instantiation(
&self,
instantiation: &Cursor,
) -> Option<(Cursor, ItemId, usize)> {
instantiation
.cur_type()
.canonical_declaration(Some(instantiation))
.and_then(|canon_decl| {
self.get_resolved_type(&canon_decl).and_then(
|template_decl_id| {
- template_decl_id.num_self_template_params(self).map(
- |num_params| {
- (
- *canon_decl.cursor(),
- template_decl_id.into(),
- num_params,
- )
- },
- )
+ let num_params = template_decl_id.num_self_template_params(self);
+ if num_params == 0 {
+ None
+ } else {
+ Some((
+ *canon_decl.cursor(),
+ template_decl_id.into(),
+ num_params,
+ ))
+ }
},
)
})
.or_else(|| {
// If we haven't already parsed the declaration of
// the template being instantiated, then it *must*
// be on the stack of types we are currently
// parsing. If it wasn't then clang would have
@@ -1551,25 +1574,26 @@ impl BindgenContext {
.referenced()
.and_then(|referenced| {
self.currently_parsed_types()
.iter()
.find(|partial_ty| *partial_ty.decl() == referenced)
.cloned()
})
.and_then(|template_decl| {
- template_decl.num_self_template_params(self).map(
- |num_template_params| {
- (
- *template_decl.decl(),
- template_decl.id(),
- num_template_params,
- )
- },
- )
+ let num_template_params = template_decl.num_self_template_params(self);
+ if num_template_params == 0 {
+ None
+ } else {
+ Some((
+ *template_decl.decl(),
+ template_decl.id(),
+ num_template_params,
+ ))
+ }
})
})
}
/// Parse a template instantiation, eg `Foo<int>`.
///
/// This is surprisingly difficult to do with libclang, due to the fact that
/// it doesn't provide explicit template argument information, except for
@@ -1606,27 +1630,24 @@ impl BindgenContext {
&mut self,
with_id: ItemId,
template: TypeId,
ty: &clang::Type,
location: clang::Cursor,
) -> Option<TypeId> {
use clang_sys;
- let num_expected_args = match self.resolve_type(template)
- .num_self_template_params(self) {
- Some(n) => n,
- None => {
- warn!(
- "Tried to instantiate a template for which we could not \
- determine any template parameters"
- );
- return None;
- }
- };
+ let num_expected_args = self.resolve_type(template).num_self_template_params(self);
+ if num_expected_args == 0 {
+ warn!(
+ "Tried to instantiate a template for which we could not \
+ determine any template parameters"
+ );
+ return None;
+ }
let mut args = vec![];
let mut found_const_arg = false;
let mut children = location.collect_children();
if children.iter().all(|c| !c.has_children()) {
// This is insanity... If clang isn't giving us a properly nested
// AST for which template arguments belong to which template we are
@@ -1888,30 +1909,65 @@ impl BindgenContext {
debug!("Not resolved, maybe builtin?");
self.build_builtin_ty(ty)
}
/// Make a new item that is a resolved type reference to the `wrapped_id`.
///
/// This is unfortunately a lot of bloat, but is needed to properly track
- /// constness et. al.
+ /// constness et al.
///
/// We should probably make the constness tracking separate, so it doesn't
/// bloat that much, but hey, we already bloat the heck out of builtin
/// types.
pub fn build_ty_wrapper(
&mut self,
with_id: ItemId,
wrapped_id: TypeId,
parent_id: Option<ItemId>,
ty: &clang::Type,
) -> TypeId {
+ self.build_wrapper(
+ with_id,
+ wrapped_id,
+ parent_id,
+ ty,
+ ty.is_const(),
+ )
+ }
+
+ /// A wrapper over a type that adds a const qualifier explicitly.
+ ///
+    /// Needed to handle const methods in C++, wrapping the type.
+ pub fn build_const_wrapper(
+ &mut self,
+ with_id: ItemId,
+ wrapped_id: TypeId,
+ parent_id: Option<ItemId>,
+ ty: &clang::Type,
+ ) -> TypeId {
+ self.build_wrapper(
+ with_id,
+ wrapped_id,
+ parent_id,
+ ty,
+ /* is_const = */ true,
+ )
+ }
+
+ fn build_wrapper(
+ &mut self,
+ with_id: ItemId,
+ wrapped_id: TypeId,
+ parent_id: Option<ItemId>,
+ ty: &clang::Type,
+ is_const: bool,
+ ) -> TypeId {
let spelling = ty.spelling();
- let is_const = ty.is_const();
let layout = ty.fallible_layout().ok();
let type_kind = TypeKind::ResolvedTypeRef(wrapped_id);
let ty = Type::new(Some(spelling), layout, type_kind, is_const);
let item = Item::new(
with_id,
None,
None,
parent_id.unwrap_or(self.current_module.into()),
@@ -2286,17 +2342,21 @@ impl BindgenContext {
roots.reverse();
roots
};
let whitelisted_items_predicate =
if self.options().whitelist_recursively {
traversal::all_edges
} else {
- traversal::no_edges
+ // Only follow InnerType edges from the whitelisted roots.
+ // Such inner types (e.g. anonymous structs/unions) are
+ // always emitted by codegen, and they need to be whitelisted
+ // to make sure they are processed by e.g. the derive analysis.
+ traversal::only_inner_type_edges
};
let whitelisted = WhitelistedItemsTraversal::new(
self,
roots.clone(),
whitelisted_items_predicate,
).collect::<ItemSet>();
@@ -2311,32 +2371,32 @@ impl BindgenContext {
};
self.whitelisted = Some(whitelisted);
self.codegen_items = Some(codegen_items);
}
/// Convenient method for getting the prefix to use for most traits in
/// codegen depending on the `use_core` option.
- pub fn trait_prefix(&self) -> quote::Ident {
+ pub fn trait_prefix(&self) -> Term {
if self.options().use_core {
self.rust_ident_raw("core")
} else {
self.rust_ident_raw("std")
}
}
- /// Call if a binden complex is generated
- pub fn generated_bindegen_complex(&self) {
- self.generated_bindegen_complex.set(true)
+ /// Call if a bindgen complex is generated
+ pub fn generated_bindgen_complex(&self) {
+ self.generated_bindgen_complex.set(true)
}
- /// Whether we need to generate the binden complex type
- pub fn need_bindegen_complex_type(&self) -> bool {
- self.generated_bindegen_complex.get()
+ /// Whether we need to generate the bindgen complex type
+ pub fn need_bindgen_complex_type(&self) -> bool {
+ self.generated_bindgen_complex.get()
}
/// Compute whether we can derive debug.
fn compute_cannot_derive_debug(&mut self) {
let _t = self.timer("compute_cannot_derive_debug");
assert!(self.cannot_derive_debug.is_none());
if self.options.derive_debug {
self.cannot_derive_debug = Some(analyze::<CannotDeriveDebug>(self));
@@ -2498,17 +2558,17 @@ impl BindgenContext {
}
/// Check if `--no-copy` flag is enabled for this item.
pub fn no_copy_by_name(&self, item: &Item) -> bool {
let name = item.canonical_path(self)[1..].join("::");
self.options().no_copy_types.matches(&name)
}
- /// Chech if `--no-hash` flag is enabled for this item.
+ /// Check if `--no-hash` flag is enabled for this item.
pub fn no_hash_by_name(&self, item: &Item) -> bool {
let name = item.canonical_path(self)[1..].join("::");
self.options().no_hash_types.matches(&name)
}
}
/// A builder struct for configuring item resolution options.
#[derive(Debug, Copy, Clone)]
@@ -2613,23 +2673,23 @@ impl PartialType {
self.id
}
}
impl TemplateParameters for PartialType {
fn self_template_params(
&self,
_ctx: &BindgenContext,
- ) -> Option<Vec<TypeId>> {
+ ) -> Vec<TypeId> {
// Maybe at some point we will eagerly parse named types, but for now we
// don't and this information is unavailable.
- None
+ vec![]
}
- fn num_self_template_params(&self, _ctx: &BindgenContext) -> Option<usize> {
+ fn num_self_template_params(&self, _ctx: &BindgenContext) -> usize {
// Wouldn't it be nice if libclang would reliably give us this
// information‽
match self.decl().kind() {
clang_sys::CXCursor_ClassTemplate |
clang_sys::CXCursor_FunctionTemplate |
clang_sys::CXCursor_TypeAliasTemplateDecl => {
let mut num_params = 0;
self.decl().visit(|c| {
@@ -2638,14 +2698,14 @@ impl TemplateParameters for PartialType
clang_sys::CXCursor_TemplateTemplateParameter |
clang_sys::CXCursor_NonTypeTemplateParameter => {
num_params += 1;
}
_ => {}
};
clang_sys::CXChildVisit_Continue
});
- Some(num_params)
+ num_params
}
- _ => None,
+ _ => 0,
}
}
}
--- a/third_party/rust/bindgen/src/ir/derive.rs
+++ b/third_party/rust/bindgen/src/ir/derive.rs
@@ -38,17 +38,17 @@ pub trait CanTriviallyDeriveDebug {
pub trait CanDeriveCopy<'a> {
/// Return `true` if `Copy` can be derived for this thing, `false`
/// otherwise.
fn can_derive_copy(&'a self, ctx: &'a BindgenContext) -> bool;
}
/// A trait that encapsulates the logic for whether or not we can trivially
/// derive `Copy` without looking at any other types or results of fix point
-/// analsyses. This is a helper trait for fix point analysis.
+/// analyses. This is a helper trait for fix point analysis.
pub trait CanTriviallyDeriveCopy {
/// Return `true` if `Copy` can be trivially derived for this thing, `false`
/// otherwise.
fn can_trivially_derive_copy(&self) -> bool;
}
/// A trait that encapsulates the logic for whether or not we can derive
/// `Default` for a given thing.
--- a/third_party/rust/bindgen/src/ir/enum_ty.rs
+++ b/third_party/rust/bindgen/src/ir/enum_ty.rs
@@ -2,16 +2,17 @@
use super::context::{BindgenContext, TypeId};
use super::item::Item;
use super::ty::TypeKind;
use clang;
use ir::annotations::Annotations;
use ir::item::ItemCanonicalPath;
use parse::{ClangItemParser, ParseError};
+use regex_set::RegexSet;
/// An enum representing custom handling that can be given to a variant.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum EnumVariantCustomBehavior {
/// This variant will be a module containing constants.
ModuleConstify,
/// This variant will be constified, that is, forced to generate a constant.
Constify,
@@ -132,54 +133,46 @@ impl Enum {
));
}
}
CXChildVisit_Continue
});
Ok(Enum::new(repr, variants))
}
- /// Whether the enum should be a bitfield
- pub fn is_bitfield(&self, ctx: &BindgenContext, item: &Item) -> bool {
+ fn is_matching_enum(&self, ctx: &BindgenContext, enums: &RegexSet, item: &Item) -> bool {
let path = item.canonical_path(ctx);
let enum_ty = item.expect_type();
- ctx.options().bitfield_enums.matches(&path[1..].join("::")) ||
- (enum_ty.name().is_none() &&
- self.variants().iter().any(|v| {
- ctx.options().bitfield_enums.matches(&v.name())
- }))
+ let path_matches = enums.matches(&path[1..].join("::"));
+ let enum_is_anon = enum_ty.name().is_none();
+ let a_variant_matches = self.variants().iter().any(|v| {
+ enums.matches(&v.name())
+ });
+ path_matches || (enum_is_anon && a_variant_matches)
+ }
+
+ /// Whether the enum should be a bitfield
+ pub fn is_bitfield(&self, ctx: &BindgenContext, item: &Item) -> bool {
+ self.is_matching_enum(ctx, &ctx.options().bitfield_enums, item)
}
/// Whether the enum should be an constified enum module
- pub fn is_constified_enum_module(
- &self,
- ctx: &BindgenContext,
- item: &Item,
- ) -> bool {
- let path = item.canonical_path(ctx);
- let enum_ty = item.expect_type();
+ pub fn is_constified_enum_module(&self, ctx: &BindgenContext, item: &Item) -> bool {
+ self.is_matching_enum(ctx, &ctx.options().constified_enum_modules, item)
+ }
- ctx.options().constified_enum_modules.matches(&path[1..].join("::")) ||
- (enum_ty.name().is_none() &&
- self.variants().iter().any(|v| {
- ctx.options().constified_enum_modules.matches(&v.name())
- }))
+ /// Whether the enum should be a set of constants
+ pub fn is_constified_enum(&self, ctx: &BindgenContext, item: &Item) -> bool {
+ self.is_matching_enum(ctx, &ctx.options().constified_enums, item)
}
/// Whether the enum should be a Rust enum
pub fn is_rustified_enum(&self, ctx: &BindgenContext, item: &Item) -> bool {
- let path = item.canonical_path(ctx);
- let enum_ty = item.expect_type();
-
- ctx.options().rustified_enums.matches(&path[1..].join("::")) ||
- (enum_ty.name().is_none() &&
- self.variants().iter().any(|v| {
- ctx.options().rustified_enums.matches(&v.name())
- }))
+ self.is_matching_enum(ctx, &ctx.options().rustified_enums, item)
}
}
/// A single enum variant, to be contained only in an enum.
#[derive(Debug)]
pub struct EnumVariant {
/// The name of the variant.
name: String,
--- a/third_party/rust/bindgen/src/ir/function.rs
+++ b/third_party/rust/bindgen/src/ir/function.rs
@@ -188,17 +188,17 @@ impl Abi {
Abi::Unknown(..) => true,
_ => false,
}
}
}
impl quote::ToTokens for Abi {
fn to_tokens(&self, tokens: &mut quote::Tokens) {
- tokens.append(match *self {
+ tokens.append_all(match *self {
Abi::C => quote! { "C" },
Abi::Stdcall => quote! { "stdcall" },
Abi::Fastcall => quote! { "fastcall" },
Abi::ThisCall => quote! { "thiscall" },
Abi::Aapcs => quote! { "aapcs" },
Abi::Win64 => quote! { "win64" },
Abi::Unknown(cc) => panic!(
"Cannot turn unknown calling convention to tokens: {:?}",
@@ -397,24 +397,37 @@ impl FunctionSig {
return Err(ParseError::Continue);
}
if is_method || is_constructor || is_destructor {
let is_const = is_method && cursor.method_is_const();
let is_virtual = is_method && cursor.method_is_virtual();
let is_static = is_method && cursor.method_is_static();
if !is_static && !is_virtual {
- let class = Item::parse(cursor.semantic_parent(), None, ctx)
+ let parent = cursor.semantic_parent();
+ let class = Item::parse(parent, None, ctx)
.expect("Expected to parse the class");
// The `class` most likely is not finished parsing yet, so use
// the unchecked variant.
let class = class.as_type_id_unchecked();
+ let class = if is_const {
+ let const_class_id = ctx.next_item_id();
+ ctx.build_const_wrapper(
+ const_class_id,
+ class,
+ None,
+ &parent.cur_type(),
+ )
+ } else {
+ class
+ };
+
let ptr =
- Item::builtin_type(TypeKind::Pointer(class), is_const, ctx);
+ Item::builtin_type(TypeKind::Pointer(class), false, ctx);
args.insert(0, (Some("this".into()), ptr));
} else if is_virtual {
let void = Item::builtin_type(TypeKind::Void, false, ctx);
let ptr =
Item::builtin_type(TypeKind::Pointer(void), false, ctx);
args.insert(0, (Some("this".into()), ptr));
}
}
--- a/third_party/rust/bindgen/src/ir/item.rs
+++ b/third_party/rust/bindgen/src/ir/item.rs
@@ -1107,45 +1107,45 @@ impl DotAttributes for Item {
impl<T> TemplateParameters for T
where
T: Copy + Into<ItemId>
{
fn self_template_params(
&self,
ctx: &BindgenContext,
- ) -> Option<Vec<TypeId>> {
- ctx.resolve_item_fallible(*self).and_then(|item| {
+ ) -> Vec<TypeId> {
+ ctx.resolve_item_fallible(*self).map_or(vec![], |item| {
item.self_template_params(ctx)
})
}
}
impl TemplateParameters for Item {
fn self_template_params(
&self,
ctx: &BindgenContext,
- ) -> Option<Vec<TypeId>> {
+ ) -> Vec<TypeId> {
self.kind.self_template_params(ctx)
}
}
impl TemplateParameters for ItemKind {
fn self_template_params(
&self,
ctx: &BindgenContext,
- ) -> Option<Vec<TypeId>> {
+ ) -> Vec<TypeId> {
match *self {
ItemKind::Type(ref ty) => ty.self_template_params(ctx),
// If we start emitting bindings to explicitly instantiated
// functions, then we'll need to check ItemKind::Function for
// template params.
ItemKind::Function(_) |
ItemKind::Module(_) |
- ItemKind::Var(_) => None,
+ ItemKind::Var(_) => vec![],
}
}
}
// An utility function to handle recursing inside nested types.
fn visit_child(
cur: clang::Cursor,
id: ItemId,
@@ -1444,18 +1444,18 @@ impl ClangItemParser for Item {
{
if ty.is_associated_type() ||
location.cur_type().is_associated_type()
{
return Ok(Item::new_opaque_type(id, ty, ctx));
}
- if let Some(id) = Item::type_param(Some(id), location, ctx) {
- return Ok(id);
+ if let Some(param_id) = Item::type_param(None, location, ctx) {
+ return Ok(ctx.build_ty_wrapper(id, param_id, None, ty));
}
}
let decl = {
let decl = ty.declaration();
decl.definition().unwrap_or(decl)
};
--- a/third_party/rust/bindgen/src/ir/layout.rs
+++ b/third_party/rust/bindgen/src/ir/layout.rs
@@ -1,65 +1,73 @@
//! Intermediate representation for the physical layout of some type.
use super::derive::{CanTriviallyDeriveCopy, CanTriviallyDeriveDebug,
CanTriviallyDeriveDefault, CanTriviallyDeriveHash,
CanTriviallyDerivePartialEqOrPartialOrd, CanDerive};
use super::ty::{RUST_DERIVE_IN_ARRAY_LIMIT, Type, TypeKind};
+use ir::context::BindgenContext;
use clang;
-use std::{cmp, mem};
+use std::cmp;
/// A type that represents the struct layout of a type.
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct Layout {
/// The size (in bytes) of this layout.
pub size: usize,
/// The alignment (in bytes) of this layout.
pub align: usize,
/// Whether this layout's members are packed or not.
pub packed: bool,
}
#[test]
fn test_layout_for_size() {
+ use std::mem;
+
let ptr_size = mem::size_of::<*mut ()>();
- assert_eq!(Layout::for_size(ptr_size), Layout::new(ptr_size, ptr_size));
assert_eq!(
- Layout::for_size(3 * ptr_size),
+ Layout::for_size_internal(ptr_size, ptr_size),
+ Layout::new(ptr_size, ptr_size)
+ );
+ assert_eq!(
+ Layout::for_size_internal(ptr_size, 3 * ptr_size),
Layout::new(3 * ptr_size, ptr_size)
);
}
impl Layout {
/// Construct a new `Layout` with the given `size` and `align`. It is not
/// packed.
pub fn new(size: usize, align: usize) -> Self {
Layout {
size,
align,
packed: false,
}
}
- /// Creates a non-packed layout for a given size, trying to use the maximum
- /// alignment possible.
- pub fn for_size(size: usize) -> Self {
+ fn for_size_internal(ptr_size: usize, size: usize) -> Self {
let mut next_align = 2;
- while size % next_align == 0 &&
- next_align <= mem::size_of::<*mut ()>()
- {
+ while size % next_align == 0 && next_align <= ptr_size {
next_align *= 2;
}
Layout {
size: size,
align: next_align / 2,
packed: false,
}
}
+ /// Creates a non-packed layout for a given size, trying to use the maximum
+ /// alignment possible.
+ pub fn for_size(ctx: &BindgenContext, size: usize) -> Self {
+ Self::for_size_internal(ctx.target_pointer_size(), size)
+ }
+
/// Is this a zero-sized layout?
pub fn is_zero(&self) -> bool {
self.size == 0 && self.align == 0
}
/// Construct a zero-sized layout.
pub fn zero() -> Self {
Self::new(0, 0)
@@ -75,17 +83,18 @@ impl Layout {
#[derive(Clone, Debug, PartialEq)]
pub struct Opaque(pub Layout);
impl Opaque {
/// Construct a new opaque type from the given clang type.
pub fn from_clang_ty(ty: &clang::Type) -> Type {
let layout = Layout::new(ty.size(), ty.align());
let ty_kind = TypeKind::Opaque;
- Type::new(None, Some(layout), ty_kind, false)
+ let is_const = ty.is_const();
+ Type::new(None, Some(layout), ty_kind, is_const)
}
/// Return the known rust type we should use to create a correctly-aligned
/// field with this layout.
pub fn known_rust_type_for_array(&self) -> Option<&'static str> {
Some(match self.0.align {
8 => "u64",
4 => "u32",
--- a/third_party/rust/bindgen/src/ir/objc.rs
+++ b/third_party/rust/bindgen/src/ir/objc.rs
@@ -8,16 +8,17 @@ use clang;
use clang_sys::CXChildVisit_Continue;
use clang_sys::CXCursor_ObjCCategoryDecl;
use clang_sys::CXCursor_ObjCClassMethodDecl;
use clang_sys::CXCursor_ObjCClassRef;
use clang_sys::CXCursor_ObjCInstanceMethodDecl;
use clang_sys::CXCursor_ObjCProtocolDecl;
use clang_sys::CXCursor_ObjCProtocolRef;
use quote;
+use proc_macro2::{Term, Span};
/// Objective C interface as used in TypeKind
///
/// Also protocols and categories are parsed as this type
#[derive(Debug)]
pub struct ObjCInterface {
/// The name
/// like, NSObject
@@ -211,17 +212,17 @@ impl ObjCMethod {
self.is_class_method
}
/// Formats the method call
pub fn format_method_call(&self, args: &[quote::Tokens]) -> quote::Tokens {
let split_name: Vec<_> = self.name
.split(':')
.filter(|p| !p.is_empty())
- .map(quote::Ident::new)
+ .map(|name| Term::new(name, Span::call_site()))
.collect();
// No arguments
if args.len() == 0 && split_name.len() == 1 {
let name = &split_name[0];
return quote! {
#name
};
@@ -234,19 +235,20 @@ impl ObjCMethod {
args,
split_name
);
}
// Get arguments without type signatures to pass to `msg_send!`
let mut args_without_types = vec![];
for arg in args.iter() {
- let name_and_sig: Vec<&str> = arg.as_str().split(' ').collect();
+ let arg = arg.to_string();
+ let name_and_sig: Vec<&str> = arg.split(' ').collect();
let name = name_and_sig[0];
- args_without_types.push(quote::Ident::new(name))
+ args_without_types.push(Term::new(name, Span::call_site()))
};
let args = split_name
.into_iter()
.zip(args_without_types)
.map(|(arg, arg_val)| quote! { #arg : #arg_val });
quote! {
--- a/third_party/rust/bindgen/src/ir/template.rs
+++ b/third_party/rust/bindgen/src/ir/template.rs
@@ -76,112 +76,93 @@ use parse::ClangItemParser;
/// ```
///
/// The following table depicts the results of each trait method when invoked on
/// each of the declarations above:
///
/// +------+----------------------+--------------------------+------------------------+----
/// |Decl. | self_template_params | num_self_template_params | all_template_parameters| ...
/// +------+----------------------+--------------------------+------------------------+----
-/// |Foo | Some([T, U]) | Some(2) | Some([T, U]) | ...
-/// |Bar | Some([V]) | Some(1) | Some([T, U, V]) | ...
-/// |Inner | None | None | Some([T, U]) | ...
-/// |Lol | Some([W]) | Some(1) | Some([T, U, W]) | ...
-/// |Wtf | Some([X]) | Some(1) | Some([T, U, X]) | ...
-/// |Qux | None | None | None | ...
+/// |Foo | [T, U] | 2 | [T, U] | ...
+/// |Bar | [V] | 1 | [T, U, V] | ...
+/// |Inner | [] | 0 | [T, U] | ...
+/// |Lol | [W] | 1 | [T, U, W] | ...
+/// |Wtf | [X] | 1 | [T, U, X] | ...
+/// |Qux | [] | 0 | [] | ...
/// +------+----------------------+--------------------------+------------------------+----
///
/// ----+------+-----+----------------------+
/// ... |Decl. | ... | used_template_params |
/// ----+------+-----+----------------------+
-/// ... |Foo | ... | Some([T, U]) |
-/// ... |Bar | ... | Some([V]) |
-/// ... |Inner | ... | None |
-/// ... |Lol | ... | Some([T]) |
-/// ... |Wtf | ... | Some([T]) |
-/// ... |Qux | ... | None |
+/// ... |Foo | ... | [T, U] |
+/// ... |Bar | ... | [V] |
+/// ... |Inner | ... | [] |
+/// ... |Lol | ... | [T] |
+/// ... |Wtf | ... | [T] |
+/// ... |Qux | ... | [] |
/// ----+------+-----+----------------------+
pub trait TemplateParameters {
/// Get the set of `ItemId`s that make up this template declaration's free
/// template parameters.
///
/// Note that these might *not* all be named types: C++ allows
/// constant-value template parameters as well as template-template
/// parameters. Of course, Rust does not allow generic parameters to be
/// anything but types, so we must treat them as opaque, and avoid
/// instantiating them.
fn self_template_params(&self, ctx: &BindgenContext)
- -> Option<Vec<TypeId>>;
+ -> Vec<TypeId>;
/// Get the number of free template parameters this template declaration
/// has.
- ///
- /// Implementations *may* return `Some` from this method when
- /// `template_params` returns `None`. This is useful when we only have
- /// partial information about the template declaration, such as when we are
- /// in the middle of parsing it.
- fn num_self_template_params(&self, ctx: &BindgenContext) -> Option<usize> {
- self.self_template_params(ctx).map(|params| params.len())
+ fn num_self_template_params(&self, ctx: &BindgenContext) -> usize {
+ self.self_template_params(ctx).len()
}
/// Get the complete set of template parameters that can affect this
/// declaration.
///
/// Note that this item doesn't need to be a template declaration itself for
/// `Some` to be returned here (in contrast to `self_template_params`). If
/// this item is a member of a template declaration, then the parent's
/// template parameters are included here.
///
/// In the example above, `Inner` depends on both of the `T` and `U` type
/// parameters, even though it is not itself a template declaration and
/// therefore has no type parameters itself. Perhaps it helps to think about
/// how we would fully reference such a member type in C++:
/// `Foo<int,char>::Inner`. `Foo` *must* be instantiated with template
/// arguments before we can gain access to the `Inner` member type.
- fn all_template_params(&self, ctx: &BindgenContext) -> Option<Vec<TypeId>>
+ fn all_template_params(&self, ctx: &BindgenContext) -> Vec<TypeId>
where
Self: ItemAncestors,
{
- let each_self_params: Vec<Vec<_>> = self.ancestors(ctx)
- .filter_map(|id| id.self_template_params(ctx))
- .collect();
- if each_self_params.is_empty() {
- None
- } else {
- Some(
- each_self_params
- .into_iter()
- .rev()
- .flat_map(|params| params)
- .collect(),
- )
- }
+ let ancestors: Vec<_> = self.ancestors(ctx).collect();
+ ancestors.into_iter().rev().flat_map(|id| {
+ id.self_template_params(ctx).into_iter()
+ }).collect()
}
/// Get only the set of template parameters that this item uses. This is a
/// subset of `all_template_params` and does not necessarily contain any of
/// `self_template_params`.
- fn used_template_params(&self, ctx: &BindgenContext) -> Option<Vec<TypeId>>
+ fn used_template_params(&self, ctx: &BindgenContext) -> Vec<TypeId>
where
Self: AsRef<ItemId>,
{
assert!(
ctx.in_codegen_phase(),
"template parameter usage is not computed until codegen"
);
let id = *self.as_ref();
- ctx.resolve_item(id).all_template_params(ctx).map(
- |all_params| {
- all_params
+ ctx.resolve_item(id).all_template_params(ctx)
.into_iter()
.filter(|p| ctx.uses_template_parameter(id, *p))
.collect()
- },
- )
}
}
/// A trait for things which may or may not be a named template type parameter.
pub trait AsTemplateParam {
/// Any extra information the implementor might need to make this decision.
type Extra;
--- a/third_party/rust/bindgen/src/ir/traversal.rs
+++ b/third_party/rust/bindgen/src/ir/traversal.rs
@@ -196,21 +196,23 @@ impl TraversalPredicate for for<'a> fn(&
/// A `TraversalPredicate` implementation that follows all edges, and therefore
/// traversals using this predicate will see the whole IR graph reachable from
/// the traversal's roots.
pub fn all_edges(_: &BindgenContext, _: Edge) -> bool {
true
}
-/// A `TraversalPredicate` implementation that never follows any edges, and
-/// therefore traversals using this predicate will only visit the traversal's
-/// roots.
-pub fn no_edges(_: &BindgenContext, _: Edge) -> bool {
- false
+/// A `TraversalPredicate` implementation that only follows
+/// `EdgeKind::InnerType` edges, and therefore traversals using this predicate
+/// will only visit the traversal's roots and their inner types. This is used
+/// in no-recursive-whitelist mode, where inner types such as anonymous
+/// structs/unions still need to be processed.
+pub fn only_inner_type_edges(_: &BindgenContext, edge: Edge) -> bool {
+ edge.kind == EdgeKind::InnerType
}
/// A `TraversalPredicate` implementation that only follows edges to items that
/// are enabled for code generation. This lets us skip considering items for
/// which are not reachable from code generation.
pub fn codegen_edges(ctx: &BindgenContext, edge: Edge) -> bool {
let cc = &ctx.options().codegen_config;
match edge.kind {
--- a/third_party/rust/bindgen/src/ir/ty.rs
+++ b/third_party/rust/bindgen/src/ir/ty.rs
@@ -11,17 +11,16 @@ use super::layout::{Layout, Opaque};
use super::objc::ObjCInterface;
use super::template::{AsTemplateParam, TemplateInstantiation,
TemplateParameters};
use super::traversal::{EdgeKind, Trace, Tracer};
use clang::{self, Cursor};
use parse::{ClangItemParser, ParseError, ParseResult};
use std::borrow::Cow;
use std::io;
-use std::mem;
/// The base representation of a type in bindgen.
///
/// A type has an optional name, which if present cannot be empty, a `layout`
/// (size, alignment and packedness) if known, a `Kind`, which determines which
/// kind of type it is, and whether the type is const.
#[derive(Debug)]
pub struct Type {
@@ -227,28 +226,26 @@ impl Type {
ctx.resolve_type(inner).is_incomplete_array(ctx)
}
_ => None,
}
}
/// What is the layout of this type?
pub fn layout(&self, ctx: &BindgenContext) -> Option<Layout> {
- use std::mem;
-
self.layout.or_else(|| {
match self.kind {
TypeKind::Comp(ref ci) => ci.layout(ctx),
// FIXME(emilio): This is a hack for anonymous union templates.
// Use the actual pointer size!
TypeKind::Pointer(..) |
TypeKind::BlockPointer => {
Some(Layout::new(
- mem::size_of::<*mut ()>(),
- mem::align_of::<*mut ()>(),
+ ctx.target_pointer_size(),
+ ctx.target_pointer_size(),
))
}
TypeKind::ResolvedTypeRef(inner) => {
ctx.resolve_type(inner).layout(ctx)
}
_ => None,
}
})
@@ -538,32 +535,32 @@ fn is_invalid_type_param_empty_name() {
let ty = Type::new(Some("".into()), None, TypeKind::TypeParam, false);
assert!(ty.is_invalid_type_param())
}
impl TemplateParameters for Type {
fn self_template_params(
&self,
ctx: &BindgenContext,
- ) -> Option<Vec<TypeId>> {
+ ) -> Vec<TypeId> {
self.kind.self_template_params(ctx)
}
}
impl TemplateParameters for TypeKind {
fn self_template_params(
&self,
ctx: &BindgenContext,
- ) -> Option<Vec<TypeId>> {
+ ) -> Vec<TypeId> {
match *self {
TypeKind::ResolvedTypeRef(id) => {
ctx.resolve_type(id).self_template_params(ctx)
}
TypeKind::Comp(ref comp) => comp.self_template_params(ctx),
- TypeKind::TemplateAlias(_, ref args) => Some(args.clone()),
+ TypeKind::TemplateAlias(_, ref args) => args.clone(),
TypeKind::Opaque |
TypeKind::TemplateInstantiation(..) |
TypeKind::Void |
TypeKind::NullPtr |
TypeKind::Int(_) |
TypeKind::Float(_) |
TypeKind::Complex(_) |
@@ -573,17 +570,17 @@ impl TemplateParameters for TypeKind {
TypeKind::Pointer(_) |
TypeKind::BlockPointer |
TypeKind::Reference(_) |
TypeKind::UnresolvedTypeRef(..) |
TypeKind::TypeParam |
TypeKind::Alias(_) |
TypeKind::ObjCId |
TypeKind::ObjCSel |
- TypeKind::ObjCInterface(_) => None,
+ TypeKind::ObjCInterface(_) => vec![],
}
}
}
/// The kind of float this type represents.
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum FloatKind {
/// A `float`.
@@ -591,27 +588,16 @@ pub enum FloatKind {
/// A `double`.
Double,
/// A `long double`.
LongDouble,
/// A `__float128`.
Float128,
}
-impl FloatKind {
- /// If this type has a known size, return it (in bytes).
- pub fn known_size(&self) -> usize {
- match *self {
- FloatKind::Float => mem::size_of::<f32>(),
- FloatKind::Double | FloatKind::LongDouble => mem::size_of::<f64>(),
- FloatKind::Float128 => mem::size_of::<f64>() * 2,
- }
- }
-}
-
/// The different kinds of types that we can parse.
#[derive(Debug)]
pub enum TypeKind {
/// The void type.
Void,
/// The `nullptr_t` type.
NullPtr,
@@ -1203,16 +1189,17 @@ impl Type {
location
);
return Err(ParseError::Continue);
}
}
};
let name = if name.is_empty() { None } else { Some(name) };
+
let is_const = ty.is_const();
let ty = Type::new(name, layout, kind, is_const);
// TODO: maybe declaration.canonical()?
Ok(ParseResult::New(ty, Some(cursor.canonical())))
}
}
--- a/third_party/rust/bindgen/src/lib.rs
+++ b/third_party/rust/bindgen/src/lib.rs
@@ -1,14 +1,17 @@
//! Generate Rust bindings for C and C++ libraries.
//!
//! Provide a C/C++ header file, receive Rust FFI code to call into C/C++
//! functions and use types defined in the header.
//!
//! See the [`Builder`](./struct.Builder.html) struct for usage.
+//!
+//! See the [Users Guide](https://rust-lang-nursery.github.io/rust-bindgen/) for
+//! additional documentation.
#![deny(missing_docs)]
#![deny(warnings)]
#![deny(unused_extern_crates)]
// To avoid rather annoying warnings when matching with CXCursor_xxx as a
// constant.
#![allow(non_upper_case_globals)]
// `quote!` nests quite deeply.
#![recursion_limit="128"]
@@ -18,16 +21,17 @@ extern crate cexpr;
#[allow(unused_extern_crates)]
extern crate cfg_if;
extern crate clang_sys;
#[macro_use]
extern crate lazy_static;
extern crate peeking_take_while;
#[macro_use]
extern crate quote;
+extern crate proc_macro2;
extern crate regex;
extern crate which;
#[cfg(feature = "logging")]
#[macro_use]
extern crate log;
#[cfg(not(feature = "logging"))]
@@ -74,18 +78,20 @@ doc_mod!(parse, parse_docs);
doc_mod!(regex_set, regex_set_docs);
pub use features::{LATEST_STABLE_RUST, RUST_TARGET_STRINGS, RustTarget};
use features::RustFeatures;
use ir::context::{BindgenContext, ItemId};
use ir::item::Item;
use parse::{ClangItemParser, ParseError};
use regex_set::RegexSet;
+pub use codegen::EnumVariation;
use std::borrow::Cow;
+use std::collections::HashMap;
use std::fs::{File, OpenOptions};
use std::io::{self, Write};
use std::iter;
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
use std::sync::Arc;
/// A type used to indicate which kind of items do we have to generate.
@@ -150,16 +156,33 @@ impl Default for CodegenConfig {
/// let bindings = builder().header("path/to/input/header")
/// .whitelisted_type("SomeCoolClass")
/// .whitelisted_function("do_some_cool_thing")
/// .generate()?;
///
/// // Write the generated bindings to an output file.
/// bindings.write_to_file("path/to/output.rs")?;
/// ```
+///
+/// # Enums
+///
+/// Bindgen can map C/C++ enums into Rust in different ways. The way bindgen maps enums depends on
+/// the pattern passed to several methods:
+///
+/// 1. [`constified_enum_module()`](#method.constified_enum_module)
+/// 2. [`bitfield_enum()`](#method.bitfield_enum)
+/// 3. [`rustified_enum()`](#method.rustified_enum)
+///
+/// For each C enum, bindgen tries to match the pattern in the following order:
+///
+/// 1. Constified enum module
+/// 2. Bitfield enum
+/// 3. Rustified enum
+///
+/// If none of the above patterns match, then bindgen will generate a set of Rust constants.
#[derive(Debug, Default)]
pub struct Builder {
options: BindgenOptions,
input_headers: Vec<String>,
// Tuples of unsaved file contents of the form (name, contents).
input_header_contents: Vec<(String, String)>,
}
@@ -176,16 +199,26 @@ impl Builder {
if let Some(header) = self.input_headers.last().cloned() {
// Positional argument 'header'
output_vector.push(header);
}
output_vector.push("--rust-target".into());
output_vector.push(self.options.rust_target.into());
+ if self.options.default_enum_style != Default::default() {
+ output_vector.push("--default-enum-variant=".into());
+ output_vector.push(match self.options.default_enum_style {
+ codegen::EnumVariation::Rust => "rust",
+ codegen::EnumVariation::Bitfield => "bitfield",
+ codegen::EnumVariation::Consts => "consts",
+ codegen::EnumVariation::ModuleConsts => "moduleconsts",
+ }.into())
+ }
+
self.options
.bitfield_enums
.get_items()
.iter()
.map(|item| {
output_vector.push("--bitfield-enum".into());
output_vector.push(
item.trim_left_matches("^")
@@ -219,16 +252,30 @@ impl Builder {
item.trim_left_matches("^")
.trim_right_matches("$")
.into(),
);
})
.count();
self.options
+ .constified_enums
+ .get_items()
+ .iter()
+ .map(|item| {
+ output_vector.push("--constified-enum".into());
+ output_vector.push(
+ item.trim_left_matches("^")
+ .trim_right_matches("$")
+ .into(),
+ );
+ })
+ .count();
+
+ self.options
.blacklisted_types
.get_items()
.iter()
.map(|item| {
output_vector.push("--blacklist-type".into());
output_vector.push(
item.trim_left_matches("^")
.trim_right_matches("$")
@@ -321,29 +368,16 @@ impl Builder {
}
if self.options.enable_cxx_namespaces {
output_vector.push("--enable-cxx-namespaces".into());
}
if self.options.disable_name_namespacing {
output_vector.push("--disable-name-namespacing".into());
}
- self.options
- .links
- .iter()
- .map(|&(ref item, _)| {
- output_vector.push("--framework".into());
- output_vector.push(
- item.trim_left_matches("^")
- .trim_right_matches("$")
- .into(),
- );
- })
- .count();
-
if !self.options.codegen_config.functions {
output_vector.push("--ignore-functions".into());
}
output_vector.push("--generate".into());
//Temporary placeholder for below 4 options
let mut options: Vec<String> = Vec::new();
@@ -367,29 +401,16 @@ impl Builder {
}
output_vector.push(options.join(","));
if !self.options.codegen_config.methods {
output_vector.push("--ignore-methods".into());
}
- self.options
- .links
- .iter()
- .map(|&(ref item, _)| {
- output_vector.push("--clang-args".into());
- output_vector.push(
- item.trim_left_matches("^")
- .trim_right_matches("$")
- .into(),
- );
- })
- .count();
-
if !self.options.convert_floats {
output_vector.push("--no-convert-floats".into());
}
if !self.options.prepend_enum_name {
output_vector.push("--no-prepend-enum-name".into());
}
@@ -415,29 +436,16 @@ impl Builder {
output_vector.push(
item.trim_left_matches("^")
.trim_right_matches("$")
.into(),
);
})
.count();
- self.options
- .links
- .iter()
- .map(|&(ref item, _)| {
- output_vector.push("--static".into());
- output_vector.push(
- item.trim_left_matches("^")
- .trim_right_matches("$")
- .into(),
- );
- })
- .count();
-
if self.options.use_core {
output_vector.push("--use-core".into());
}
if self.options.conservative_inline_namespaces {
output_vector.push("--conservative-inline-namespaces".into());
}
@@ -595,16 +603,22 @@ impl Builder {
/// Specify the rust target
///
/// The default is the latest stable Rust version
pub fn rust_target(mut self, rust_target: RustTarget) -> Self {
self.options.set_rust_target(rust_target);
self
}
+ /// Disable support for native Rust unions, if supported.
+ pub fn disable_untagged_union(mut self) -> Self {
+ self.options.rust_features.untagged_union = false;
+ self
+ }
+
/// Set the output graphviz file.
pub fn emit_ir_graphviz<T: Into<String>>(mut self, path: T) -> Builder {
let path = path.into();
self.options.emit_ir_graphviz = Some(path);
self
}
/// Whether the generated bindings should contain documentation comments or
@@ -637,17 +651,17 @@ impl Builder {
/// function and the `MoonBoots` struct that it transitively references. By
/// configuring with `whitelist_recursively(false)`, `bindgen` will not emit
/// bindings for anything except the explicitly whitelisted items, and there
/// would be no emitted struct definition for `MoonBoots`. However, the
/// `initiate_dance_party` function would still reference `MoonBoots`!
///
/// **Disabling this feature will almost certainly cause `bindgen` to emit
/// bindings that will not compile!** If you disable this feature, then it
- /// is *your* responsiblity to provide definitions for every type that is
+ /// is *your* responsibility to provide definitions for every type that is
/// referenced from an explicitly whitelisted item. One way to provide the
/// definitions is by using the [`Builder::raw_line`](#method.raw_line)
/// method, another would be to define them in Rust and then `include!(...)`
/// the bindings immediately afterwards.
pub fn whitelist_recursively(mut self, doit: bool) -> Self {
self.options.whitelist_recursively = doit;
self
}
@@ -735,16 +749,21 @@ impl Builder {
/// Whitelist the given variable.
///
/// Deprecated: use whitelist_var instead.
#[deprecated(note = "use whitelist_var instead")]
pub fn whitelisted_var<T: AsRef<str>>(self, arg: T) -> Builder {
self.whitelist_var(arg)
}
+ /// Set the default style of code to generate for enums
+ pub fn default_enum_style(mut self, arg: codegen::EnumVariation) -> Builder {
+ self.options.default_enum_style = arg;
+ self
+ }
/// Mark the given enum (or set of enums, if using a pattern) as being
/// bitfield-like. Regular expressions are supported.
///
/// This makes bindgen generate a type that isn't a rust `enum`. Regular
/// expressions are supported.
pub fn bitfield_enum<T: AsRef<str>>(mut self, arg: T) -> Builder {
self.options.bitfield_enums.insert(arg);
@@ -762,32 +781,68 @@ impl Builder {
/// Take a look at https://github.com/rust-lang/rust/issues/36927 for
/// more information.
pub fn rustified_enum<T: AsRef<str>>(mut self, arg: T) -> Builder {
self.options.rustified_enums.insert(arg);
self
}
/// Mark the given enum (or set of enums, if using a pattern) as a set of
+ /// constants that are not to be put into a module.
+ pub fn constified_enum<T: AsRef<str>>(mut self, arg: T) -> Builder {
+ self.options.constified_enums.insert(arg);
+ self
+ }
+
+ /// Mark the given enum (or set of enums, if using a pattern) as a set of
/// constants that should be put into a module.
///
/// This makes bindgen generate modules containing constants instead of
/// just constants. Regular expressions are supported.
pub fn constified_enum_module<T: AsRef<str>>(mut self, arg: T) -> Builder {
self.options.constified_enum_modules.insert(arg);
self
}
/// Add a string to prepend to the generated bindings. The string is passed
/// through without any modification.
- pub fn raw_line<T: Into<String>>(mut self, arg: T) -> Builder {
+ pub fn raw_line<T: Into<String>>(mut self, arg: T) -> Self {
self.options.raw_lines.push(arg.into());
self
}
+ /// Add a given line to the beginning of module `mod`.
+ pub fn module_raw_line<T, U>(mut self, mod_: T, line: U) -> Self
+ where
+ T: Into<String>,
+ U: Into<String>,
+ {
+ self.options
+ .module_lines
+ .entry(mod_.into())
+ .or_insert_with(Vec::new)
+ .push(line.into());
+ self
+ }
+
+ /// Add a given set of lines to the beginning of module `mod`.
+ pub fn module_raw_lines<T, I>(mut self, mod_: T, lines: I) -> Self
+ where
+ T: Into<String>,
+ I: IntoIterator,
+ I::Item: Into<String>,
+ {
+ self.options
+ .module_lines
+ .entry(mod_.into())
+ .or_insert_with(Vec::new)
+ .extend(lines.into_iter().map(Into::into));
+ self
+ }
+
/// Add an argument to be passed straight through to clang.
pub fn clang_arg<T: Into<String>>(mut self, arg: T) -> Builder {
self.options.clang_args.push(arg.into());
self
}
/// Add arguments to be passed straight through to clang.
pub fn clang_args<I>(mut self, iter: I) -> Builder
@@ -796,36 +851,16 @@ impl Builder {
I::Item: AsRef<str>,
{
for arg in iter {
self = self.clang_arg(arg.as_ref())
}
self
}
- /// Make the generated bindings link the given shared library.
- pub fn link<T: Into<String>>(mut self, library: T) -> Builder {
- self.options.links.push((library.into(), LinkType::Default));
- self
- }
-
- /// Make the generated bindings link the given static library.
- pub fn link_static<T: Into<String>>(mut self, library: T) -> Builder {
- self.options.links.push((library.into(), LinkType::Static));
- self
- }
-
- /// Make the generated bindings link the given framework.
- pub fn link_framework<T: Into<String>>(mut self, library: T) -> Builder {
- self.options.links.push(
- (library.into(), LinkType::Framework),
- );
- self
- }
-
/// Emit bindings for builtin definitions (for example `__builtin_va_list`)
/// in the generated Rust.
pub fn emit_builtins(mut self) -> Builder {
self.options.builtins = true;
self
}
/// Avoid converting floats to `f32`/`f64` by default.
@@ -1237,31 +1272,34 @@ struct BindgenOptions {
whitelisted_types: RegexSet,
/// Whitelisted functions. See docs for `whitelisted_types` for more.
whitelisted_functions: RegexSet,
/// Whitelisted variables. See docs for `whitelisted_types` for more.
whitelisted_vars: RegexSet,
+ /// The default style of code to generate for enums
+ default_enum_style: codegen::EnumVariation,
+
/// The enum patterns to mark an enum as bitfield.
bitfield_enums: RegexSet,
/// The enum patterns to mark an enum as a Rust enum.
rustified_enums: RegexSet,
/// The enum patterns to mark an enum as a module of constants.
constified_enum_modules: RegexSet,
+ /// The enum patterns to mark an enum as a set of constants.
+ constified_enums: RegexSet,
+
/// Whether we should generate builtins or not.
builtins: bool,
- /// The set of libraries we should link in the generated Rust code.
- links: Vec<(String, LinkType)>,
-
/// True if we should dump the Clang AST for debugging purposes.
emit_ast: bool,
/// True if we should dump our internal IR for debugging purposes.
emit_ir: bool,
/// Output graphviz dot file.
emit_ir_graphviz: Option<String>,
@@ -1276,17 +1314,17 @@ struct BindgenOptions {
/// True if we should generate layout tests for generated structures.
layout_tests: bool,
/// True if we should implement the Debug trait for C/C++ structures and types
/// that do not support automatically deriving Debug.
impl_debug: bool,
/// True if we should implement the PartialEq trait for C/C++ structures and types
- /// that do not support autoamically deriving PartialEq.
+ /// that do not support automatically deriving PartialEq.
impl_partialeq: bool,
/// True if we should derive Copy trait implementations for C/C++ structures
/// and types.
derive_copy: bool,
/// True if we should derive Debug trait implementations for C/C++ structures
/// and types.
@@ -1330,19 +1368,25 @@ struct BindgenOptions {
namespaced_constants: bool,
/// True if we should use MSVC name mangling rules.
msvc_mangling: bool,
/// Whether we should convert float types to f32/f64 types.
convert_floats: bool,
- /// The set of raw lines to prepend to the generated Rust code.
+ /// The set of raw lines to prepend to the top-level module of generated
+ /// Rust code.
raw_lines: Vec<String>,
+ /// The set of raw lines to prepend to each of the modules.
+ ///
+ /// This only makes sense if the `enable_cxx_namespaces` option is set.
+ module_lines: HashMap<String, Vec<String>>,
+
/// The set of arguments to pass straight through to Clang.
clang_args: Vec<String>,
/// The input header file.
input_header: Option<String>,
/// Unsaved files for input.
input_unsaved_files: Vec<clang::UnsavedFile>,
@@ -1355,27 +1399,27 @@ struct BindgenOptions {
/// of them.
codegen_config: CodegenConfig,
/// Whether to treat inline namespaces conservatively.
///
/// See the builder method description for more details.
conservative_inline_namespaces: bool,
- /// Wether to keep documentation comments in the generated output. See the
+ /// Whether to keep documentation comments in the generated output. See the
/// documentation for more details.
generate_comments: bool,
/// Whether to generate inline functions. Defaults to false.
generate_inline_functions: bool,
- /// Wether to whitelist types recursively. Defaults to true.
+ /// Whether to whitelist types recursively. Defaults to true.
whitelist_recursively: bool,
- /// Intead of emitting 'use objc;' to files generated from objective c files,
+ /// Instead of emitting 'use objc;' to files generated from objective c files,
/// generate '#[macro_use] extern crate objc;'
objc_extern_crate: bool,
/// Whether to use the clang-provided name mangling. This is true and
/// probably needed for C++ features.
///
/// However, some old libclang versions seem to return incorrect results in
/// some cases for non-mangled functions, see [1], so we allow disabling it.
@@ -1418,16 +1462,17 @@ impl ::std::panic::UnwindSafe for Bindge
impl BindgenOptions {
fn build(&mut self) {
self.whitelisted_vars.build();
self.whitelisted_types.build();
self.whitelisted_functions.build();
self.blacklisted_types.build();
self.opaque_types.build();
self.bitfield_enums.build();
+ self.constified_enums.build();
self.constified_enum_modules.build();
self.rustified_enums.build();
self.no_partialeq_types.build();
self.no_copy_types.build();
self.no_hash_types.build();
}
/// Update rust target version
@@ -1452,21 +1497,22 @@ impl Default for BindgenOptions {
rust_target: rust_target,
rust_features: rust_target.into(),
blacklisted_types: Default::default(),
opaque_types: Default::default(),
rustfmt_path: Default::default(),
whitelisted_types: Default::default(),
whitelisted_functions: Default::default(),
whitelisted_vars: Default::default(),
+ default_enum_style: Default::default(),
bitfield_enums: Default::default(),
rustified_enums: Default::default(),
+ constified_enums: Default::default(),
constified_enum_modules: Default::default(),
builtins: false,
- links: vec![],
emit_ast: false,
emit_ir: false,
emit_ir_graphviz: None,
layout_tests: true,
impl_debug: false,
impl_partialeq: false,
derive_copy: true,
derive_debug: true,
@@ -1479,16 +1525,17 @@ impl Default for BindgenOptions {
enable_cxx_namespaces: false,
disable_name_namespacing: false,
use_core: false,
ctypes_prefix: None,
namespaced_constants: true,
msvc_mangling: false,
convert_floats: true,
raw_lines: vec![],
+ module_lines: HashMap::default(),
clang_args: vec![],
input_header: None,
input_unsaved_files: vec![],
parse_callbacks: None,
codegen_config: CodegenConfig::all(),
conservative_inline_namespaces: false,
generate_comments: true,
generate_inline_functions: false,
@@ -1501,29 +1548,16 @@ impl Default for BindgenOptions {
rustfmt_configuration_file: None,
no_partialeq_types: Default::default(),
no_copy_types: Default::default(),
no_hash_types: Default::default(),
}
}
}
-/// The linking type to use with a given library.
-///
-/// TODO: #104: This is ignored at the moment, but shouldn't be.
-#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
-pub enum LinkType {
- /// Use shared library linking. This is the default.
- Default,
- /// Use static linking.
- Static,
- /// The library is an OSX framework.
- Framework,
-}
-
fn ensure_libclang_is_loaded() {
if clang_sys::is_loaded() {
return;
}
// XXX (issue #350): Ensure that our dynamically loaded `libclang`
// doesn't get dropped prematurely, nor is loaded multiple times
// across different threads.
@@ -1691,25 +1725,25 @@ impl Bindings {
writer.write(line.as_bytes())?;
writer.write("\n".as_bytes())?;
}
if !self.options.raw_lines.is_empty() {
writer.write("\n".as_bytes())?;
}
- let bindings = self.module.as_str().to_string();
+ let bindings = self.module.to_string();
match self.rustfmt_generated_string(&bindings) {
Ok(rustfmt_bindings) => {
writer.write(rustfmt_bindings.as_bytes())?;
},
Err(err) => {
eprintln!("{:?}", err);
- writer.write(bindings.as_str().as_bytes())?;
+ writer.write(bindings.as_bytes())?;
},
}
Ok(())
}
/// Checks if rustfmt_bindings is set and runs rustfmt on the string
fn rustfmt_generated_string<'a>(
&self,
@@ -1860,23 +1894,23 @@ fn parse(context: &mut BindgenContext) -
"How did this happen?"
);
Ok(())
}
/// Extracted Clang version data
#[derive(Debug)]
pub struct ClangVersion {
- /// Major and minor semvar, if parsing was successful
+ /// Major and minor semver, if parsing was successful
pub parsed: Option<(u32, u32)>,
/// full version string
pub full: String,
}
-/// Get the major and the minor semvar numbers of Clang's version
+/// Get the major and the minor semver numbers of Clang's version
pub fn clang_version() -> ClangVersion {
if !clang_sys::is_loaded() {
// TODO(emilio): Return meaningful error (breaking).
clang_sys::load().expect("Unable to find libclang");
}
let raw_v: String = clang::extract_clang_version();
let split_v: Option<Vec<&str>> = raw_v.split_whitespace().nth(2).map(|v| {
--- a/third_party/rust/bindgen/src/options.rs
+++ b/third_party/rust/bindgen/src/options.rs
@@ -1,9 +1,9 @@
-use bindgen::{Builder, CodegenConfig, RUST_TARGET_STRINGS, RustTarget, builder};
+use bindgen::{Builder, CodegenConfig, RUST_TARGET_STRINGS, RustTarget, builder, EnumVariation};
use clap::{App, Arg};
use std::fs::File;
use std::io::{self, Error, ErrorKind, Write, stderr};
use std::path::PathBuf;
use std::str::FromStr;
/// Construct a new [`Builder`](./struct.Builder.html) from command line flags.
pub fn builder_from_flags<I>(
@@ -21,36 +21,50 @@ where
let matches = App::new("bindgen")
.version(option_env!("CARGO_PKG_VERSION").unwrap_or("unknown"))
.about("Generates Rust bindings from C/C++ headers.")
.usage("bindgen [FLAGS] [OPTIONS] <header> -- <clang-args>...")
.args(&[
Arg::with_name("header")
.help("C or C++ header file")
.required(true),
+ Arg::with_name("default-enum-style")
+ .long("default-enum-style")
+ .help("The default style of code used to generate enums.")
+ .value_name("variant")
+ .default_value("consts")
+ .possible_values(&["consts", "moduleconsts", "bitfield", "rust"])
+ .multiple(false),
Arg::with_name("bitfield-enum")
.long("bitfield-enum")
.help("Mark any enum whose name matches <regex> as a set of \
- bitfield flags instead of an enumeration.")
+ bitfield flags.")
.value_name("regex")
.takes_value(true)
.multiple(true)
.number_of_values(1),
Arg::with_name("rustified-enum")
.long("rustified-enum")
- .help("Mark any enum whose name matches <regex> as a Rust enum \
- instead of a set of constants.")
+ .help("Mark any enum whose name matches <regex> as a Rust enum.")
+ .value_name("regex")
+ .takes_value(true)
+ .multiple(true)
+ .number_of_values(1),
+ Arg::with_name("constified-enum")
+ .long("constified-enum")
+ .help("Mark any enum whose name matches <regex> as a series of \
+ constants.")
.value_name("regex")
.takes_value(true)
.multiple(true)
.number_of_values(1),
Arg::with_name("constified-enum-module")
.long("constified-enum-module")
.help("Mark any enum whose name matches <regex> as a module of \
- constants instead of just constants.")
+ constants.")
.value_name("regex")
.takes_value(true)
.multiple(true)
.number_of_values(1),
Arg::with_name("blacklist-type")
.long("blacklist-type")
.help("Mark <type> as hidden.")
.value_name("type")
@@ -144,42 +158,29 @@ where
Arg::with_name("enable-cxx-namespaces")
.long("enable-cxx-namespaces")
.help("Enable support for C++ namespaces."),
Arg::with_name("disable-name-namespacing")
.long("disable-name-namespacing")
.help("Disable namespacing via mangling, causing bindgen to \
generate names like \"Baz\" instead of \"foo_bar_Baz\" \
for an input name \"foo::bar::Baz\"."),
- Arg::with_name("framework")
- .long("framework-link")
- .help("Link to framework.")
- .takes_value(true)
- .multiple(true)
- .number_of_values(1),
Arg::with_name("ignore-functions")
.long("ignore-functions")
.help("Do not generate bindings for functions or methods. This \
is useful when you only care about struct layouts."),
Arg::with_name("generate")
.long("generate")
.help("Generate only given items, split by commas. \
Valid values are \"functions\",\"types\", \"vars\", \
\"methods\", \"constructors\" and \"destructors\".")
.takes_value(true),
Arg::with_name("ignore-methods")
.long("ignore-methods")
.help("Do not generate bindings for methods."),
- Arg::with_name("dynamic")
- .short("l")
- .long("link")
- .help("Link to dynamic library.")
- .takes_value(true)
- .multiple(true)
- .number_of_values(1),
Arg::with_name("no-convert-floats")
.long("no-convert-floats")
.help("Do not automatically convert floats to f32/f64."),
Arg::with_name("no-prepend-enum-name")
.long("no-prepend-enum-name")
.help("Do not prepend the enum name to bitfield or constant variants."),
Arg::with_name("unstable-rust")
.long("unstable-rust")
@@ -202,22 +203,16 @@ where
.help("Add a raw line of Rust code at the beginning of output.")
.takes_value(true)
.multiple(true)
.number_of_values(1),
Arg::with_name("rust-target")
.long("rust-target")
.help(&rust_target_help)
.takes_value(true),
- Arg::with_name("static")
- .long("static-link")
- .help("Link to static library.")
- .takes_value(true)
- .multiple(true)
- .number_of_values(1),
Arg::with_name("use-core")
.long("use-core")
.help("Use types from Rust core instead of std."),
Arg::with_name("conservative-inline-namespaces")
.long("conservative-inline-namespaces")
.help("Conservatively generate inline namespaces to avoid name \
conflicts."),
Arg::with_name("use-msvc-mangling")
@@ -317,28 +312,38 @@ where
"warning: the `--unstable-rust` option is deprecated"
).expect("Unable to write error message");
}
if let Some(rust_target) = matches.value_of("rust-target") {
builder = builder.rust_target(RustTarget::from_str(rust_target)?);
}
+ if let Some(variant) = matches.value_of("default-enum-style") {
+ builder = builder.default_enum_style(EnumVariation::from_str(variant)?)
+ }
+
if let Some(bitfields) = matches.values_of("bitfield-enum") {
for regex in bitfields {
builder = builder.bitfield_enum(regex);
}
}
if let Some(rustifieds) = matches.values_of("rustified-enum") {
for regex in rustifieds {
builder = builder.rustified_enum(regex);
}
}
+ if let Some(bitfields) = matches.values_of("constified-enum") {
+ for regex in bitfields {
+ builder = builder.constified_enum(regex);
+ }
+ }
+
if let Some(constified_mods) = matches.values_of("constified-enum-module") {
for regex in constified_mods {
builder = builder.constified_enum_module(regex);
}
}
if let Some(hidden_types) = matches.values_of("blacklist-type") {
for ty in hidden_types {
builder = builder.blacklist_type(ty);
@@ -404,22 +409,16 @@ where
if matches.is_present("time-phases") {
builder = builder.time_phases(true);
}
if let Some(prefix) = matches.value_of("ctypes-prefix") {
builder = builder.ctypes_prefix(prefix);
}
- if let Some(links) = matches.values_of("dynamic") {
- for library in links {
- builder = builder.link(library);
- }
- }
-
if let Some(what_to_generate) = matches.value_of("generate") {
let mut config = CodegenConfig::nothing();
for what in what_to_generate.split(",") {
match what {
"functions" => config.functions = true,
"types" => config.types = true,
"vars" => config.vars = true,
"methods" => config.methods = true,
@@ -451,22 +450,16 @@ where
if matches.is_present("enable-cxx-namespaces") {
builder = builder.enable_cxx_namespaces();
}
if matches.is_present("disable-name-namespacing") {
builder = builder.disable_name_namespacing();
}
- if let Some(links) = matches.values_of("framework") {
- for framework in links {
- builder = builder.link_framework(framework);
- }
- }
-
if matches.is_present("ignore-functions") {
builder = builder.ignore_functions();
}
if matches.is_present("ignore-methods") {
builder = builder.ignore_methods();
}
@@ -493,22 +486,16 @@ where
}
if let Some(lines) = matches.values_of("raw-line") {
for line in lines {
builder = builder.raw_line(line);
}
}
- if let Some(links) = matches.values_of("static") {
- for library in links {
- builder = builder.link_static(library);
- }
- }
-
if matches.is_present("use-core") {
builder = builder.use_core();
}
if matches.is_present("distrust-clang-mangling") {
builder = builder.trust_clang_mangling(false);
}
--- a/third_party/rust/bindgen/src/time.rs
+++ b/third_party/rust/bindgen/src/time.rs
@@ -17,17 +17,17 @@ impl<'a> Timer<'a> {
pub fn new(name: &'a str) -> Self {
Timer {
output: true,
name,
start: Instant::now()
}
}
- /// Sets whether or not the Timer will print a mesage
+ /// Sets whether or not the Timer will print a message
/// when it is dropped.
pub fn with_output(mut self, output: bool) -> Self {
self.output = output;
self
}
/// Returns the time elapsed since the timer's creation
pub fn elapsed(&self) -> Duration {
--- a/third_party/rust/clang-sys/.cargo-checksum.json
+++ b/third_party/rust/clang-sys/.cargo-checksum.json
@@ -1,1 +1,1 @@
-{"files":{".travis.yml":"d5c89494c836e00ec8c3c02c9e228bf5dc34aabff203c37662a248e2da4bda05","CHANGELOG.md":"2d6ade5cd80d388392915d24f4712e8fd604579c7b26d48aa99908499e08817d","CONTRIBUTING.md":"4e2a45992604f07a37030bb1fc598c6f54a1785747c4f37a15a37481bbdecce8","Cargo.toml":"c2256b278d66bf80e6b62e58efa62a5fb98665c8c23dec68829b2c9fc2191962","LICENSE.txt":"cfc7749b96f63bd31c3c42b5c471bf756814053e847c10f3eb003417bc523d30","README.md":"dff1b472fe1edbc6059ff5a96e595fa8dab9e9e133d10fd761cf5dfdcc80f4c6","appveyor.yml":"c9ab8ab1ab028b27d2be176e994a0d6a255cf8bcc36e15868472b6b8abf33fac","build.rs":"e6cf2ec64466b21f3e6eb8aaf7327ccdfcc77a2067e0e19ee5e2a6f57117c09b","ci/before_install.sh":"711c9d0539fa0372980c3a288d9482a0e46d3ba0fb8f7c7c110d6488a8ec4de5","ci/install.bat":"fb636c3511ba038ccf805755ef6542237cc595e905edcd61d56abd7163321f76","ci/script.sh":"1bb1cd29bd9635cc126cdcbd6c02f3500620a231a86726bf2165a4b74baaf433","ci/test_script.bat":"73462f51aaa9a1c14ce9f55c41dc3672df64faa9789725384ae4f28d8ba3c90b","clippy.toml":"acef14b9acffa18d1069ae08a4e8fe824a614f91b0bc71a6b1c68e4d885397e6","src/lib.rs":"a0410d2e23f808ba441e7bebe84785e85582c8130613e49c7b3b98e0aaf55b66","src/link.rs":"7323c3ddcd8038b899c21f7087666628f88e9cb430900549855ea78717824e6f","src/support.rs":"ecd0489662caad0a13ea468cbbbec3ca62ba9796a6e24aabb392978a3455ebfd","tests/header.h":"b1cf564b21d76db78529d1934e1481a5f0452fdedc6e32954608293c310498b6","tests/lib.rs":"e5e8a60bcaec3b5d043fde4a993d397adb56454d0b2a6adaa15df0535246f909"},"package":"939a1a34310b120d26eba35c29475933128b0ec58e24b43327f8dbe6036fc538"}
\ No newline at end of file
+{"files":{".travis.yml":"d5c89494c836e00ec8c3c02c9e228bf5dc34aabff203c37662a248e2da4bda05","CHANGELOG.md":"62fd8ba43afbc4da3dba40d448a5af482794aaaa99071d40dc7abf8fc1a2195b","Cargo.toml":"1ada60cd29713d4386050d2b61a9eed430827885520816b0412ed0380fa3fa8f","LICENSE.txt":"cfc7749b96f63bd31c3c42b5c471bf756814053e847c10f3eb003417bc523d30","README.md":"dff1b472fe1edbc6059ff5a96e595fa8dab9e9e133d10fd761cf5dfdcc80f4c6","appveyor.yml":"c9ab8ab1ab028b27d2be176e994a0d6a255cf8bcc36e15868472b6b8abf33fac","build.rs":"50be9c247e528ab0a354a7652fa9516906f79bbb4d128d54db7f5a9ee1ed2a86","ci/before_install.sh":"711c9d0539fa0372980c3a288d9482a0e46d3ba0fb8f7c7c110d6488a8ec4de5","ci/install.bat":"fb636c3511ba038ccf805755ef6542237cc595e905edcd61d56abd7163321f76","ci/script.sh":"1bb1cd29bd9635cc126cdcbd6c02f3500620a231a86726bf2165a4b74baaf433","ci/test_script.bat":"73462f51aaa9a1c14ce9f55c41dc3672df64faa9789725384ae4f28d8ba3c90b","clippy.toml":"acef14b9acffa18d1069ae08a4e8fe824a614f91b0bc71a6b1c68e4d885397e6","src/lib.rs":"d0a11284694f4f77448e72480addca613572d19c07fa92157e0fa717ed504abd","src/link.rs":"a0208e6b8e4840f1162b3b799b5e12dd559cc6f31a330b0eb1ba4ebe2385296d","src/support.rs":"70e77ea4337f740b13c394034c5705e962af6ee7ac4843fc7c9c7fe22ec2d074","tests/header.h":"b1cf564b21d76db78529d1934e1481a5f0452fdedc6e32954608293c310498b6","tests/lib.rs":"e5e8a60bcaec3b5d043fde4a993d397adb56454d0b2a6adaa15df0535246f909"},"package":"d7f7c04e52c35222fffcc3a115b5daf5f7e2bfb71c13c4e2321afe1fc71859c2"}
\ No newline at end of file
--- a/third_party/rust/clang-sys/CHANGELOG.md
+++ b/third_party/rust/clang-sys/CHANGELOG.md
@@ -1,8 +1,13 @@
+## [0.23.0] - 2018-06-16
+
+### Changed
+- Changed `Clang::find` to skip dynamic libraries for an incorrect architecture on Windows
+
## [0.22.0] - 2018-03-11
### Added
- Added support for `clang` 6.0.x
- Bumped `libc` version to `0.2.39`
- Bumped `libloading` version to `0.5.0`
## [0.21.2] - 2018-02-17
deleted file mode 100644
--- a/third_party/rust/clang-sys/CONTRIBUTING.md
+++ /dev/null
@@ -1,7 +0,0 @@
-# Contributing to clang-sys
-
-## Pull Requests
-
-If you are intending to make a pull request, please make your changes in a branch that originated
-from the `development` branch, not the `master` branch. Then, make your pull request against the
-`development` branch.
--- a/third_party/rust/clang-sys/Cargo.toml
+++ b/third_party/rust/clang-sys/Cargo.toml
@@ -7,42 +7,34 @@
#
# If you believe there's an error in this file please file an
# issue against the rust-lang/cargo repository. If you're
# editing this file be aware that the upstream Cargo.toml
# will likely look very different (and much more reasonable)
[package]
name = "clang-sys"
-version = "0.22.0"
+version = "0.23.0"
authors = ["Kyle Mayes <kyle@mayeses.com>"]
build = "build.rs"
links = "clang"
description = "Rust bindings for libclang."
documentation = "https://kylemayes.github.io/clang-sys/3_5/clang_sys"
readme = "README.md"
license = "Apache-2.0"
repository = "https://github.com/KyleMayes/clang-sys"
-[dependencies.clippy]
-version = "0.0.*"
-optional = true
-
[dependencies.glob]
version = "0.2.11"
[dependencies.libc]
version = "0.2.39"
[dependencies.libloading]
version = "0.5.0"
optional = true
-[build-dependencies.clippy]
-version = "0.0.*"
-optional = true
-
[build-dependencies.glob]
version = "0.2.11"
[features]
clang_3_5 = []
clang_3_6 = ["gte_clang_3_6"]
clang_3_7 = ["gte_clang_3_6", "gte_clang_3_7"]
clang_3_8 = ["gte_clang_3_6", "gte_clang_3_7", "gte_clang_3_8"]
--- a/third_party/rust/clang-sys/build.rs
+++ b/third_party/rust/clang-sys/build.rs
@@ -20,25 +20,21 @@
//! static or dynamic libraries.
//!
//! * `LLVM_CONFIG_PATH` - provides a path to an `llvm-config` executable
//! * `LIBCLANG_PATH` - provides a path to a directory containing a `libclang` shared library
//! * `LIBCLANG_STATIC_PATH` - provides a path to a directory containing LLVM and Clang static libraries
#![allow(unused_attributes)]
-#![cfg_attr(feature="clippy", feature(plugin))]
-#![cfg_attr(feature="clippy", plugin(clippy))]
-#![cfg_attr(feature="clippy", warn(clippy))]
-
extern crate glob;
use std::env;
use std::fs::{self, File};
-use std::io::{Read};
+use std::io::{Read, Seek, SeekFrom};
use std::path::{Path, PathBuf};
use std::process::{Command};
use glob::{MatchOptions};
/// Returns the version in the supplied file if one can be found.
fn find_version(file: &str) -> Option<&str> {
if file.starts_with("libclang.so.") {
@@ -123,43 +119,85 @@ const SEARCH_OSX: &[&str] = &[
/// Backup search directory globs for Windows.
const SEARCH_WINDOWS: &[&str] = &[
"C:\\LLVM\\lib",
"C:\\Program Files*\\LLVM\\lib",
"C:\\MSYS*\\MinGW*\\lib",
];
+/// Returns the ELF class from the ELF header in the supplied file.
+fn parse_elf_header(file: &PathBuf) -> Result<u8, String> {
+ let mut file = try!(File::open(file).map_err(|e| e.to_string()));
+ let mut elf = [0; 5];
+ try!(file.read_exact(&mut elf).map_err(|e| e.to_string()));
+ if elf[..4] == [127, 69, 76, 70] {
+ Ok(elf[4])
+ } else {
+ Err("invalid ELF header".into())
+ }
+}
+
+/// Returns the magic number from the PE header in the supplied file.
+fn parse_pe_header(file: &PathBuf) -> Result<u16, String> {
+ let mut file = try!(File::open(file).map_err(|e| e.to_string()));
+ let mut pe = [0; 4];
+
+ // Determine the header offset.
+ try!(file.seek(SeekFrom::Start(0x3C)).map_err(|e| e.to_string()));
+ try!(file.read_exact(&mut pe).map_err(|e| e.to_string()));
+ let offset = i32::from(pe[0]) + (i32::from(pe[1]) << 8) + (i32::from(pe[2]) << 16) + (i32::from(pe[3]) << 24);
+
+ // Determine the validity of the header.
+ try!(file.seek(SeekFrom::Start(offset as u64)).map_err(|e| e.to_string()));
+ try!(file.read_exact(&mut pe).map_err(|e| e.to_string()));
+ if pe != [80, 69, 0, 0] {
+ return Err("invalid PE header".into());
+ }
+
+ // Find the magic number.
+ try!(file.seek(SeekFrom::Current(20)).map_err(|e| e.to_string()));
+ try!(file.read_exact(&mut pe).map_err(|e| e.to_string()));
+ Ok(u16::from(pe[0]) + (u16::from(pe[1]) << 8))
+}
+
/// Indicates the type of library being searched for.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum Library {
Dynamic,
Static,
}
impl Library {
/// Checks whether the supplied file is a valid library for the architecture.
fn check(&self, file: &PathBuf) -> Result<(), String> {
if cfg!(any(target_os="freebsd", target_os="linux")) {
if *self == Library::Static {
return Ok(());
}
- let mut file = try!(File::open(file).map_err(|e| e.to_string()));
- let mut elf = [0; 5];
- try!(file.read_exact(&mut elf).map_err(|e| e.to_string()));
- if elf[..4] != [127, 69, 76, 70] {
- return Err("invalid ELF header".into());
- }
- if cfg!(target_pointer_width="32") && elf[4] != 1 {
+ let class = try!(parse_elf_header(file));
+ if cfg!(target_pointer_width="32") && class != 1 {
return Err("invalid ELF class (64-bit)".into());
}
- if cfg!(target_pointer_width="64") && elf[4] != 2 {
+ if cfg!(target_pointer_width="64") && class != 2 {
return Err("invalid ELF class (32-bit)".into());
}
Ok(())
+ } else if cfg!(target_os="windows") {
+ if *self == Library::Static {
+ return Ok(());
+ }
+ let magic = try!(parse_pe_header(file));
+ if cfg!(target_pointer_width="32") && magic != 267 {
+ return Err("invalid DLL (64-bit)".into());
+ }
+ if cfg!(target_pointer_width="64") && magic != 523 {
+ return Err("invalid DLL (32-bit)".into());
+ }
+ Ok(())
} else {
Ok(())
}
}
}
/// Searches for a library, returning the directory it can be found in if the search was successful.
fn find(library: Library, files: &[String], env: &str) -> Result<PathBuf, String> {
@@ -211,17 +249,17 @@ fn find(library: Library, files: &[Strin
let lib = directory.join("lib");
if let Some(file) = contains(&lib, files) {
try_file!(file);
}
}
// Search the `LD_LIBRARY_PATH` directories.
if let Ok(path) = env::var("LD_LIBRARY_PATH") {
- for directory in path.split(":").map(Path::new) {
+ for directory in path.split(':').map(Path::new) {
search_directory!(directory);
}
}
// Search the backup directories.
let search = if cfg!(any(target_os="freebsd", target_os="linux")) {
SEARCH_LINUX
} else if cfg!(target_os="macos") {
--- a/third_party/rust/clang-sys/src/lib.rs
+++ b/third_party/rust/clang-sys/src/lib.rs
@@ -22,20 +22,17 @@
//! * 3.8 - [Documentation](https://kylemayes.github.io/clang-sys/3_8/clang_sys)
//! * 3.9 - [Documentation](https://kylemayes.github.io/clang-sys/3_9/clang_sys)
//! * 4.0 - [Documentation](https://kylemayes.github.io/clang-sys/4_0/clang_sys)
//! * 5.0 - [Documentation](https://kylemayes.github.io/clang-sys/5_0/clang_sys)
//! * 6.0 - [Documentation](https://kylemayes.github.io/clang-sys/6_0/clang_sys)
#![allow(non_camel_case_types, non_snake_case, non_upper_case_globals)]
-#![cfg_attr(feature="clippy", feature(plugin))]
-#![cfg_attr(feature="clippy", plugin(clippy))]
-#![cfg_attr(feature="clippy", warn(clippy))]
-#![cfg_attr(feature="clippy", allow(unreadable_literal))]
+#![cfg_attr(feature="cargo-clippy", allow(unreadable_literal))]
extern crate glob;
extern crate libc;
#[cfg(feature="runtime")]
extern crate libloading;
pub mod support;
--- a/third_party/rust/clang-sys/src/link.rs
+++ b/third_party/rust/clang-sys/src/link.rs
@@ -75,17 +75,17 @@ macro_rules! link {
match l.borrow().as_ref() {
Some(library) => Some(f(&library)),
_ => None,
}
})
}
$(
- #[cfg_attr(feature="clippy", allow(too_many_arguments))]
+ #[cfg_attr(feature="cargo-clippy", allow(too_many_arguments))]
$(#[cfg($cfg)])*
pub unsafe fn $name($($pname: $pty), *) $(-> $ret)* {
let f = with_library(|l| {
match l.functions.$name {
Some(f) => f,
_ => panic!(concat!("function not loaded: ", stringify!($name))),
}
}).expect("a `libclang` shared library is not loaded on this thread");
--- a/third_party/rust/clang-sys/src/support.rs
+++ b/third_party/rust/clang-sys/src/support.rs
@@ -58,22 +58,17 @@ pub struct Clang {
impl Clang {
//- Constructors -----------------------------
fn new(path: PathBuf, args: &[String]) -> Clang {
let version = parse_version(&path);
let c_search_paths = parse_search_paths(&path, "c", args);
let cpp_search_paths = parse_search_paths(&path, "c++", args);
- Clang {
- path: path,
- version: version,
- c_search_paths: c_search_paths,
- cpp_search_paths: cpp_search_paths,
- }
+ Clang { path, version, c_search_paths, cpp_search_paths }
}
/// Returns a `clang` executable if one can be found.
///
/// If the `CLANG_PATH` environment variable is set, that is the instance of `clang` used.
/// Otherwise, a series of directories are searched. First, If a path is supplied, that is the
/// first directory searched. Then, the directory returned by `llvm-config --bindir` is
/// searched. On OS X systems, `xcodebuild -find clang` will next be queried. Last, the
new file mode 100644
--- /dev/null
+++ b/third_party/rust/proc-macro2-0.3.5/.cargo-checksum.json
@@ -0,0 +1,1 @@
+{"files":{".travis.yml":"872a0d195dcb1e84f28aa994f301c7139f70360bb42dee3954df5ee965efea15","Cargo.toml":"e71f764696d6998512da00a9ac309f2717d103707aeef81164f906c1588ede63","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"ce05336717e1e90724491a2f54487c41c752fa2d32396639439f7c6d0f1e6776","src/lib.rs":"3280d7e0b6043d8472f418aa5c8458c97aa2b5a572f9156a251b5672828468c2","src/stable.rs":"4b5a65bd5dc174dd027b9ee951844c3765450f9d45961a8d6cd7d5f85b4c25c8","src/strnom.rs":"129fe22f0b50e5a64fca82e731c959135381c910e19f3305ef35420e0aadde08","src/unstable.rs":"b43c713ac16d9de0ba0fa1b9bebe390122b4ad60ef2fc75408f721305fdcd46b","tests/test.rs":"b06713fd8bd93ab9f0156bd25152e08f68a71b35e064c53b584f7f7dbb9b60b8"},"package":"77997c53ae6edd6d187fec07ec41b207063b5ee6f33680e9fa86d405cdd313d4"}
\ No newline at end of file
rename from third_party/rust/proc-macro2-0.3.6/.travis.yml
rename to third_party/rust/proc-macro2-0.3.5/.travis.yml
new file mode 100644
--- /dev/null
+++ b/third_party/rust/proc-macro2-0.3.5/Cargo.toml
@@ -0,0 +1,33 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g. crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository. If you're
+# editing this file be aware that the upstream Cargo.toml
+# will likely look very different (and much more reasonable)
+
+[package]
+name = "proc-macro2"
+version = "0.3.5"
+authors = ["Alex Crichton <alex@alexcrichton.com>"]
+description = "A stable implementation of the upcoming new `proc_macro` API. Comes with an\noption, off by default, to also reimplement itself in terms of the upstream\nunstable API.\n"
+homepage = "https://github.com/alexcrichton/proc-macro2"
+documentation = "https://docs.rs/proc-macro2"
+readme = "README.md"
+keywords = ["macros"]
+license = "MIT/Apache-2.0"
+repository = "https://github.com/alexcrichton/proc-macro2"
+
+[lib]
+doctest = false
+[dependencies.unicode-xid]
+version = "0.1"
+
+[features]
+default = ["proc-macro"]
+nightly = ["proc-macro"]
+proc-macro = []
rename from third_party/rust/proc-macro2-0.3.6/LICENSE-APACHE
rename to third_party/rust/proc-macro2-0.3.5/LICENSE-APACHE
rename from third_party/rust/proc-macro2-0.3.6/LICENSE-MIT
rename to third_party/rust/proc-macro2-0.3.5/LICENSE-MIT
rename from third_party/rust/proc-macro2-0.3.6/README.md
rename to third_party/rust/proc-macro2-0.3.5/README.md
new file mode 100644
--- /dev/null
+++ b/third_party/rust/proc-macro2-0.3.5/src/lib.rs
@@ -0,0 +1,590 @@
+//! A "shim crate" intended to multiplex the [`proc_macro`] API on to stable
+//! Rust.
+//!
+//! Procedural macros in Rust operate over the upstream
+//! [`proc_macro::TokenStream`][ts] type. This type currently is quite
+//! conservative and exposed no internal implementation details. Nightly
+//! compilers, however, contain a much richer interface. This richer interface
+//! allows fine-grained inspection of the token stream which avoids
+//! stringification/re-lexing and also preserves span information.
+//!
+//! The upcoming APIs added to [`proc_macro`] upstream are the foundation for
+//! productive procedural macros in the ecosystem. To help prepare the ecosystem
+//! for using them this crate serves to both compile on stable and nightly and
+//! mirrors the API-to-be. The intention is that procedural macros which switch
+//! to use this crate will be trivially able to switch to the upstream
+//! `proc_macro` crate once its API stabilizes.
+//!
+//! In the meantime this crate also has a `nightly` Cargo feature which
+//! enables it to reimplement itself with the unstable API of [`proc_macro`].
+//! This'll allow immediate usage of the beneficial upstream API, particularly
+//! around preserving span information.
+//!
+//! [`proc_macro`]: https://doc.rust-lang.org/proc_macro/
+//! [ts]: https://doc.rust-lang.org/proc_macro/struct.TokenStream.html
+
+// Proc-macro2 types in rustdoc of other crates get linked to here.
+#![doc(html_root_url = "https://docs.rs/proc-macro2/0.3.5")]
+#![cfg_attr(feature = "nightly", feature(proc_macro))]
+
+#[cfg(feature = "proc-macro")]
+extern crate proc_macro;
+
+#[cfg(not(feature = "nightly"))]
+extern crate unicode_xid;
+
+use std::fmt;
+use std::iter::FromIterator;
+use std::marker;
+use std::rc::Rc;
+use std::str::FromStr;
+
+#[macro_use]
+#[cfg(not(feature = "nightly"))]
+mod strnom;
+
+#[path = "stable.rs"]
+#[cfg(not(feature = "nightly"))]
+mod imp;
+#[path = "unstable.rs"]
+#[cfg(feature = "nightly")]
+mod imp;
+
+#[derive(Clone)]
+pub struct TokenStream {
+ inner: imp::TokenStream,
+ _marker: marker::PhantomData<Rc<()>>,
+}
+
+pub struct LexError {
+ inner: imp::LexError,
+ _marker: marker::PhantomData<Rc<()>>,
+}
+
+impl TokenStream {
+ fn _new(inner: imp::TokenStream) -> TokenStream {
+ TokenStream {
+ inner: inner,
+ _marker: marker::PhantomData,
+ }
+ }
+
+ pub fn empty() -> TokenStream {
+ TokenStream::_new(imp::TokenStream::empty())
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.inner.is_empty()
+ }
+}
+
+impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ let e = src.parse().map_err(|e| LexError {
+ inner: e,
+ _marker: marker::PhantomData,
+ })?;
+ Ok(TokenStream::_new(e))
+ }
+}
+
+#[cfg(feature = "proc-macro")]
+impl From<proc_macro::TokenStream> for TokenStream {
+ fn from(inner: proc_macro::TokenStream) -> TokenStream {
+ TokenStream::_new(inner.into())
+ }
+}
+
+#[cfg(feature = "proc-macro")]
+impl From<TokenStream> for proc_macro::TokenStream {
+ fn from(inner: TokenStream) -> proc_macro::TokenStream {
+ inner.inner.into()
+ }
+}
+
+impl FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
+ TokenStream::_new(streams.into_iter().collect())
+ }
+}
+
+impl fmt::Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ self.inner.fmt(f)
+ }
+}
+
+impl fmt::Debug for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ self.inner.fmt(f)
+ }
+}
+
+impl fmt::Debug for LexError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ self.inner.fmt(f)
+ }
+}
+
+// Returned by reference, so we can't easily wrap it.
+#[cfg(procmacro2_semver_exempt)]
+pub use imp::FileName;
+
+#[cfg(procmacro2_semver_exempt)]
+#[derive(Clone, PartialEq, Eq)]
+pub struct SourceFile(imp::SourceFile);
+
+#[cfg(procmacro2_semver_exempt)]
+impl SourceFile {
+ /// Get the path to this source file as a string.
+ pub fn path(&self) -> &FileName {
+ self.0.path()
+ }
+
+ pub fn is_real(&self) -> bool {
+ self.0.is_real()
+ }
+}
+
+#[cfg(procmacro2_semver_exempt)]
+impl AsRef<FileName> for SourceFile {
+ fn as_ref(&self) -> &FileName {
+ self.0.path()
+ }
+}
+
+#[cfg(procmacro2_semver_exempt)]
+impl fmt::Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+#[cfg(procmacro2_semver_exempt)]
+pub struct LineColumn {
+ pub line: usize,
+ pub column: usize,
+}
+
+#[derive(Copy, Clone)]
+pub struct Span {
+ inner: imp::Span,
+ _marker: marker::PhantomData<Rc<()>>,
+}
+
+impl Span {
+ fn _new(inner: imp::Span) -> Span {
+ Span {
+ inner: inner,
+ _marker: marker::PhantomData,
+ }
+ }
+
+ pub fn call_site() -> Span {
+ Span::_new(imp::Span::call_site())
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn def_site() -> Span {
+ Span::_new(imp::Span::def_site())
+ }
+
+ /// Creates a new span with the same line/column information as `self` but
+ /// that resolves symbols as though it were at `other`.
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn resolved_at(&self, other: Span) -> Span {
+ Span::_new(self.inner.resolved_at(other.inner))
+ }
+
+ /// Creates a new span with the same name resolution behavior as `self` but
+ /// with the line/column information of `other`.
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn located_at(&self, other: Span) -> Span {
+ Span::_new(self.inner.located_at(other.inner))
+ }
+
+ /// This method is only available when the `"nightly"` feature is enabled.
+ #[cfg(all(feature = "nightly", feature = "proc-macro"))]
+ pub fn unstable(self) -> proc_macro::Span {
+ self.inner.unstable()
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn source_file(&self) -> SourceFile {
+ SourceFile(self.inner.source_file())
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn start(&self) -> LineColumn {
+ let imp::LineColumn { line, column } = self.inner.start();
+ LineColumn {
+ line: line,
+ column: column,
+ }
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn end(&self) -> LineColumn {
+ let imp::LineColumn { line, column } = self.inner.end();
+ LineColumn {
+ line: line,
+ column: column,
+ }
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn join(&self, other: Span) -> Option<Span> {
+ self.inner.join(other.inner).map(Span::_new)
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn eq(&self, other: &Span) -> bool {
+ self.inner.eq(&other.inner)
+ }
+}
+
+impl fmt::Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ self.inner.fmt(f)
+ }
+}
+
+#[derive(Clone, Debug)]
+pub enum TokenTree {
+ Group(Group),
+ Term(Term),
+ Op(Op),
+ Literal(Literal),
+}
+
+impl TokenTree {
+ pub fn span(&self) -> Span {
+ match *self {
+ TokenTree::Group(ref t) => t.span(),
+ TokenTree::Term(ref t) => t.span(),
+ TokenTree::Op(ref t) => t.span(),
+ TokenTree::Literal(ref t) => t.span(),
+ }
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+ match *self {
+ TokenTree::Group(ref mut t) => t.set_span(span),
+ TokenTree::Term(ref mut t) => t.set_span(span),
+ TokenTree::Op(ref mut t) => t.set_span(span),
+ TokenTree::Literal(ref mut t) => t.set_span(span),
+ }
+ }
+}
+
+impl From<Group> for TokenTree {
+ fn from(g: Group) -> TokenTree {
+ TokenTree::Group(g)
+ }
+}
+
+impl From<Term> for TokenTree {
+ fn from(g: Term) -> TokenTree {
+ TokenTree::Term(g)
+ }
+}
+
+impl From<Op> for TokenTree {
+ fn from(g: Op) -> TokenTree {
+ TokenTree::Op(g)
+ }
+}
+
+impl From<Literal> for TokenTree {
+ fn from(g: Literal) -> TokenTree {
+ TokenTree::Literal(g)
+ }
+}
+
+impl fmt::Display for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match *self {
+ TokenTree::Group(ref t) => t.fmt(f),
+ TokenTree::Term(ref t) => t.fmt(f),
+ TokenTree::Op(ref t) => t.fmt(f),
+ TokenTree::Literal(ref t) => t.fmt(f),
+ }
+ }
+}
+
+#[derive(Clone, Debug)]
+pub struct Group {
+ delimiter: Delimiter,
+ stream: TokenStream,
+ span: Span,
+}
+
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+pub enum Delimiter {
+ Parenthesis,
+ Brace,
+ Bracket,
+ None,
+}
+
+impl Group {
+ pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
+ Group {
+ delimiter: delimiter,
+ stream: stream,
+ span: Span::call_site(),
+ }
+ }
+
+ pub fn delimiter(&self) -> Delimiter {
+ self.delimiter
+ }
+
+ pub fn stream(&self) -> TokenStream {
+ self.stream.clone()
+ }
+
+ pub fn span(&self) -> Span {
+ self.span
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+ self.span = span;
+ }
+}
+
+impl fmt::Display for Group {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ self.stream.fmt(f)
+ }
+}
+
+#[derive(Copy, Clone, Debug)]
+pub struct Op {
+ op: char,
+ spacing: Spacing,
+ span: Span,
+}
+
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+pub enum Spacing {
+ Alone,
+ Joint,
+}
+
+impl Op {
+ pub fn new(op: char, spacing: Spacing) -> Op {
+ Op {
+ op: op,
+ spacing: spacing,
+ span: Span::call_site(),
+ }
+ }
+
+ pub fn op(&self) -> char {
+ self.op
+ }
+
+ pub fn spacing(&self) -> Spacing {
+ self.spacing
+ }
+
+ pub fn span(&self) -> Span {
+ self.span
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+ self.span = span;
+ }
+}
+
+impl fmt::Display for Op {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ self.op.fmt(f)
+ }
+}
+
+#[derive(Copy, Clone)]
+pub struct Term {
+ inner: imp::Term,
+ _marker: marker::PhantomData<Rc<()>>,
+}
+
+impl Term {
+ fn _new(inner: imp::Term) -> Term {
+ Term {
+ inner: inner,
+ _marker: marker::PhantomData,
+ }
+ }
+
+ pub fn new(string: &str, span: Span) -> Term {
+ Term::_new(imp::Term::new(string, span.inner))
+ }
+
+ pub fn as_str(&self) -> &str {
+ self.inner.as_str()
+ }
+
+ pub fn span(&self) -> Span {
+ Span::_new(self.inner.span())
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+ self.inner.set_span(span.inner);
+ }
+}
+
+impl fmt::Display for Term {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ self.as_str().fmt(f)
+ }
+}
+
+impl fmt::Debug for Term {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ self.inner.fmt(f)
+ }
+}
+
+#[derive(Clone)]
+pub struct Literal {
+ inner: imp::Literal,
+ _marker: marker::PhantomData<Rc<()>>,
+}
+
+macro_rules! int_literals {
+ ($($name:ident => $kind:ident,)*) => ($(
+ pub fn $name(n: $kind) -> Literal {
+ Literal::_new(imp::Literal::$name(n))
+ }
+ )*)
+}
+
+impl Literal {
+ fn _new(inner: imp::Literal) -> Literal {
+ Literal {
+ inner: inner,
+ _marker: marker::PhantomData,
+ }
+ }
+
+ int_literals! {
+ u8_suffixed => u8,
+ u16_suffixed => u16,
+ u32_suffixed => u32,
+ u64_suffixed => u64,
+ usize_suffixed => usize,
+ i8_suffixed => i8,
+ i16_suffixed => i16,
+ i32_suffixed => i32,
+ i64_suffixed => i64,
+ isize_suffixed => isize,
+
+ u8_unsuffixed => u8,
+ u16_unsuffixed => u16,
+ u32_unsuffixed => u32,
+ u64_unsuffixed => u64,
+ usize_unsuffixed => usize,
+ i8_unsuffixed => i8,
+ i16_unsuffixed => i16,
+ i32_unsuffixed => i32,
+ i64_unsuffixed => i64,
+ isize_unsuffixed => isize,
+ }
+
+ pub fn f64_unsuffixed(f: f64) -> Literal {
+ assert!(f.is_finite());
+ Literal::_new(imp::Literal::f64_unsuffixed(f))
+ }
+
+ pub fn f64_suffixed(f: f64) -> Literal {
+ assert!(f.is_finite());
+ Literal::_new(imp::Literal::f64_suffixed(f))
+ }
+
+ pub fn f32_unsuffixed(f: f32) -> Literal {
+ assert!(f.is_finite());
+ Literal::_new(imp::Literal::f32_unsuffixed(f))
+ }
+
+ pub fn f32_suffixed(f: f32) -> Literal {
+ assert!(f.is_finite());
+ Literal::_new(imp::Literal::f32_suffixed(f))
+ }
+
+ pub fn string(string: &str) -> Literal {
+ Literal::_new(imp::Literal::string(string))
+ }
+
+ pub fn character(ch: char) -> Literal {
+ Literal::_new(imp::Literal::character(ch))
+ }
+
+ pub fn byte_string(s: &[u8]) -> Literal {
+ Literal::_new(imp::Literal::byte_string(s))
+ }
+
+ pub fn span(&self) -> Span {
+ Span::_new(self.inner.span())
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+ self.inner.set_span(span.inner);
+ }
+}
+
+impl fmt::Debug for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ self.inner.fmt(f)
+ }
+}
+
+impl fmt::Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ self.inner.fmt(f)
+ }
+}
+
+pub mod token_stream {
+ use std::fmt;
+ use std::marker;
+ use std::rc::Rc;
+
+ pub use TokenStream;
+ use TokenTree;
+ use imp;
+
+ pub struct IntoIter {
+ inner: imp::TokenTreeIter,
+ _marker: marker::PhantomData<Rc<()>>,
+ }
+
+ impl Iterator for IntoIter {
+ type Item = TokenTree;
+
+ fn next(&mut self) -> Option<TokenTree> {
+ self.inner.next()
+ }
+ }
+
+ impl fmt::Debug for IntoIter {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ self.inner.fmt(f)
+ }
+ }
+
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = IntoIter;
+
+ fn into_iter(self) -> IntoIter {
+ IntoIter {
+ inner: self.inner.into_iter(),
+ _marker: marker::PhantomData,
+ }
+ }
+ }
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/proc-macro2-0.3.5/src/stable.rs
@@ -0,0 +1,1227 @@
+#![cfg_attr(not(procmacro2_semver_exempt), allow(dead_code))]
+
+use std::borrow::Borrow;
+use std::cell::RefCell;
+#[cfg(procmacro2_semver_exempt)]
+use std::cmp;
+use std::collections::HashMap;
+use std::fmt;
+use std::iter;
+use std::rc::Rc;
+use std::str::FromStr;
+use std::vec;
+
+use strnom::{block_comment, skip_whitespace, whitespace, word_break, Cursor, PResult};
+use unicode_xid::UnicodeXID;
+
+use {Delimiter, Group, Op, Spacing, TokenTree};
+
+#[derive(Clone, Debug)]
+pub struct TokenStream {
+ inner: Vec<TokenTree>,
+}
+
+#[derive(Debug)]
+pub struct LexError;
+
+impl TokenStream {
+ pub fn empty() -> TokenStream {
+ TokenStream { inner: Vec::new() }
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.inner.len() == 0
+ }
+}
+
+#[cfg(procmacro2_semver_exempt)]
+fn get_cursor(src: &str) -> Cursor {
+ // Create a dummy file & add it to the codemap
+ CODEMAP.with(|cm| {
+ let mut cm = cm.borrow_mut();
+ let name = format!("<parsed string {}>", cm.files.len());
+ let span = cm.add_file(&name, src);
+ Cursor {
+ rest: src,
+ off: span.lo,
+ }
+ })
+}
+
+#[cfg(not(procmacro2_semver_exempt))]
+fn get_cursor(src: &str) -> Cursor {
+ Cursor { rest: src }
+}
+
+impl FromStr for TokenStream {
+ type Err = LexError;
+
+ fn from_str(src: &str) -> Result<TokenStream, LexError> {
+ // Create a dummy file & add it to the codemap
+ let cursor = get_cursor(src);
+
+ match token_stream(cursor) {
+ Ok((input, output)) => {
+ if skip_whitespace(input).len() != 0 {
+ Err(LexError)
+ } else {
+ Ok(output.inner)
+ }
+ }
+ Err(LexError) => Err(LexError),
+ }
+ }
+}
+
+impl fmt::Display for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let mut joint = false;
+ for (i, tt) in self.inner.iter().enumerate() {
+ if i != 0 && !joint {
+ write!(f, " ")?;
+ }
+ joint = false;
+ match *tt {
+ TokenTree::Group(ref tt) => {
+ let (start, end) = match tt.delimiter() {
+ Delimiter::Parenthesis => ("(", ")"),
+ Delimiter::Brace => ("{", "}"),
+ Delimiter::Bracket => ("[", "]"),
+ Delimiter::None => ("", ""),
+ };
+ if tt.stream().inner.inner.len() == 0 {
+ write!(f, "{} {}", start, end)?
+ } else {
+ write!(f, "{} {} {}", start, tt.stream(), end)?
+ }
+ }
+ TokenTree::Term(ref tt) => write!(f, "{}", tt.as_str())?,
+ TokenTree::Op(ref tt) => {
+ write!(f, "{}", tt.op())?;
+ match tt.spacing() {
+ Spacing::Alone => {}
+ Spacing::Joint => joint = true,
+ }
+ }
+ TokenTree::Literal(ref tt) => write!(f, "{}", tt)?,
+ }
+ }
+
+ Ok(())
+ }
+}
+
+#[cfg(feature = "proc-macro")]
+impl From<::proc_macro::TokenStream> for TokenStream {
+ fn from(inner: ::proc_macro::TokenStream) -> TokenStream {
+ inner
+ .to_string()
+ .parse()
+ .expect("compiler token stream parse failed")
+ }
+}
+
+#[cfg(feature = "proc-macro")]
+impl From<TokenStream> for ::proc_macro::TokenStream {
+ fn from(inner: TokenStream) -> ::proc_macro::TokenStream {
+ inner
+ .to_string()
+ .parse()
+ .expect("failed to parse to compiler tokens")
+ }
+}
+
+impl From<TokenTree> for TokenStream {
+ fn from(tree: TokenTree) -> TokenStream {
+ TokenStream { inner: vec![tree] }
+ }
+}
+
+impl iter::FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
+ let mut v = Vec::new();
+
+ for token in streams.into_iter() {
+ v.push(token);
+ }
+
+ TokenStream { inner: v }
+ }
+}
+
+pub type TokenTreeIter = vec::IntoIter<TokenTree>;
+
+impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = TokenTreeIter;
+
+ fn into_iter(self) -> TokenTreeIter {
+ self.inner.into_iter()
+ }
+}
+
+#[cfg(procmacro2_semver_exempt)]
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub struct FileName(String);
+
+#[cfg(procmacro2_semver_exempt)]
+impl fmt::Display for FileName {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+#[cfg(procmacro2_semver_exempt)]
+#[derive(Clone, PartialEq, Eq)]
+pub struct SourceFile {
+ name: FileName,
+}
+
+#[cfg(procmacro2_semver_exempt)]
+impl SourceFile {
+ /// Get the path to this source file as a string.
+ pub fn path(&self) -> &FileName {
+ &self.name
+ }
+
+ pub fn is_real(&self) -> bool {
+ // XXX(nika): Support real files in the future?
+ false
+ }
+}
+
+#[cfg(procmacro2_semver_exempt)]
+impl AsRef<FileName> for SourceFile {
+ fn as_ref(&self) -> &FileName {
+ self.path()
+ }
+}
+
+#[cfg(procmacro2_semver_exempt)]
+impl fmt::Debug for SourceFile {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_struct("SourceFile")
+ .field("path", &self.path())
+ .field("is_real", &self.is_real())
+ .finish()
+ }
+}
+
+#[cfg(procmacro2_semver_exempt)]
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub struct LineColumn {
+ pub line: usize,
+ pub column: usize,
+}
+
+#[cfg(procmacro2_semver_exempt)]
+thread_local! {
+ static CODEMAP: RefCell<Codemap> = RefCell::new(Codemap {
+ // NOTE: We start with a single dummy file which all call_site() and
+ // def_site() spans reference.
+ files: vec![FileInfo {
+ name: "<unspecified>".to_owned(),
+ span: Span { lo: 0, hi: 0 },
+ lines: vec![0],
+ }],
+ });
+}
+
+#[cfg(procmacro2_semver_exempt)]
+struct FileInfo {
+ name: String,
+ span: Span,
+ lines: Vec<usize>,
+}
+
+#[cfg(procmacro2_semver_exempt)]
+impl FileInfo {
+ fn offset_line_column(&self, offset: usize) -> LineColumn {
+ assert!(self.span_within(Span {
+ lo: offset as u32,
+ hi: offset as u32
+ }));
+ let offset = offset - self.span.lo as usize;
+ match self.lines.binary_search(&offset) {
+ Ok(found) => LineColumn {
+ line: found + 1,
+ column: 0,
+ },
+ Err(idx) => LineColumn {
+ line: idx,
+ column: offset - self.lines[idx - 1],
+ },
+ }
+ }
+
+ fn span_within(&self, span: Span) -> bool {
+ span.lo >= self.span.lo && span.hi <= self.span.hi
+ }
+}
+
+/// Computesthe offsets of each line in the given source string.
+#[cfg(procmacro2_semver_exempt)]
+fn lines_offsets(s: &str) -> Vec<usize> {
+ let mut lines = vec![0];
+ let mut prev = 0;
+ while let Some(len) = s[prev..].find('\n') {
+ prev += len + 1;
+ lines.push(prev);
+ }
+ lines
+}
+
+#[cfg(procmacro2_semver_exempt)]
+struct Codemap {
+ files: Vec<FileInfo>,
+}
+
+#[cfg(procmacro2_semver_exempt)]
+impl Codemap {
+ fn next_start_pos(&self) -> u32 {
+ // Add 1 so there's always space between files.
+ //
+ // We'll always have at least 1 file, as we initialize our files list
+ // with a dummy file.
+ self.files.last().unwrap().span.hi + 1
+ }
+
+ fn add_file(&mut self, name: &str, src: &str) -> Span {
+ let lines = lines_offsets(src);
+ let lo = self.next_start_pos();
+ // XXX(nika): Shouild we bother doing a checked cast or checked add here?
+ let span = Span {
+ lo: lo,
+ hi: lo + (src.len() as u32),
+ };
+
+ self.files.push(FileInfo {
+ name: name.to_owned(),
+ span: span,
+ lines: lines,
+ });
+
+ span
+ }
+
+ fn fileinfo(&self, span: Span) -> &FileInfo {
+ for file in &self.files {
+ if file.span_within(span) {
+ return file;
+ }
+ }
+ panic!("Invalid span with no related FileInfo!");
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub struct Span {
+ #[cfg(procmacro2_semver_exempt)]
+ lo: u32,
+ #[cfg(procmacro2_semver_exempt)]
+ hi: u32,
+}
+
+impl Span {
+ #[cfg(not(procmacro2_semver_exempt))]
+ pub fn call_site() -> Span {
+ Span {}
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn call_site() -> Span {
+ Span { lo: 0, hi: 0 }
+ }
+
+ pub fn def_site() -> Span {
+ Span::call_site()
+ }
+
+ pub fn resolved_at(&self, _other: Span) -> Span {
+ // Stable spans consist only of line/column information, so
+ // `resolved_at` and `located_at` only select which span the
+ // caller wants line/column information from.
+ *self
+ }
+
+ pub fn located_at(&self, other: Span) -> Span {
+ other
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn source_file(&self) -> SourceFile {
+ CODEMAP.with(|cm| {
+ let cm = cm.borrow();
+ let fi = cm.fileinfo(*self);
+ SourceFile {
+ name: FileName(fi.name.clone()),
+ }
+ })
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn start(&self) -> LineColumn {
+ CODEMAP.with(|cm| {
+ let cm = cm.borrow();
+ let fi = cm.fileinfo(*self);
+ fi.offset_line_column(self.lo as usize)
+ })
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn end(&self) -> LineColumn {
+ CODEMAP.with(|cm| {
+ let cm = cm.borrow();
+ let fi = cm.fileinfo(*self);
+ fi.offset_line_column(self.hi as usize)
+ })
+ }
+
+ #[cfg(procmacro2_semver_exempt)]
+ pub fn join(&self, other: Span) -> Option<Span> {
+ CODEMAP.with(|cm| {
+ let cm = cm.borrow();
+ // If `other` is not within the same FileInfo as us, return None.
+ if !cm.fileinfo(*self).span_within(other) {
+ return None;
+ }
+ Some(Span {
+ lo: cmp::min(self.lo, other.lo),
+ hi: cmp::max(self.hi, other.hi),
+ })
+ })
+ }
+}
+
+#[derive(Copy, Clone)]
+pub struct Term {
+ intern: usize,
+ span: Span,
+}
+
+thread_local!(static SYMBOLS: RefCell<Interner> = RefCell::new(Interner::new()));
+
+impl Term {
+ pub fn new(string: &str, span: Span) -> Term {
+ Term {
+ intern: SYMBOLS.with(|s| s.borrow_mut().intern(string)),
+ span: span,
+ }
+ }
+
+ pub fn as_str(&self) -> &str {
+ SYMBOLS.with(|interner| {
+ let interner = interner.borrow();
+ let s = interner.get(self.intern);
+ unsafe { &*(s as *const str) }
+ })
+ }
+
+ pub fn span(&self) -> Span {
+ self.span
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+ self.span = span;
+ }
+}
+
+impl fmt::Debug for Term {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_tuple("Term").field(&self.as_str()).finish()
+ }
+}
+
+struct Interner {
+ string_to_index: HashMap<MyRc, usize>,
+ index_to_string: Vec<Rc<String>>,
+}
+
+#[derive(Hash, Eq, PartialEq)]
+struct MyRc(Rc<String>);
+
+impl Borrow<str> for MyRc {
+ fn borrow(&self) -> &str {
+ &self.0
+ }
+}
+
+impl Interner {
+ fn new() -> Interner {
+ Interner {
+ string_to_index: HashMap::new(),
+ index_to_string: Vec::new(),
+ }
+ }
+
+ fn intern(&mut self, s: &str) -> usize {
+ if let Some(&idx) = self.string_to_index.get(s) {
+ return idx;
+ }
+ let s = Rc::new(s.to_string());
+ self.index_to_string.push(s.clone());
+ self.string_to_index
+ .insert(MyRc(s), self.index_to_string.len() - 1);
+ self.index_to_string.len() - 1
+ }
+
+ fn get(&self, idx: usize) -> &str {
+ &self.index_to_string[idx]
+ }
+}
+
+#[derive(Clone, Debug)]
+pub struct Literal {
+ text: String,
+ span: Span,
+}
+
+macro_rules! suffixed_numbers {
+ ($($name:ident => $kind:ident,)*) => ($(
+ pub fn $name(n: $kind) -> Literal {
+ Literal::_new(format!(concat!("{}", stringify!($kind)), n))
+ }
+ )*)
+}
+
+macro_rules! unsuffixed_numbers {
+ ($($name:ident => $kind:ident,)*) => ($(
+ pub fn $name(n: $kind) -> Literal {
+ Literal::_new(n.to_string())
+ }
+ )*)
+}
+
+impl Literal {
+ fn _new(text: String) -> Literal {
+ Literal {
+ text: text,
+ span: Span::call_site(),
+ }
+ }
+
+ suffixed_numbers! {
+ u8_suffixed => u8,
+ u16_suffixed => u16,
+ u32_suffixed => u32,
+ u64_suffixed => u64,
+ usize_suffixed => usize,
+ i8_suffixed => i8,
+ i16_suffixed => i16,
+ i32_suffixed => i32,
+ i64_suffixed => i64,
+ isize_suffixed => isize,
+
+ f32_suffixed => f32,
+ f64_suffixed => f64,
+ }
+
+ unsuffixed_numbers! {
+ u8_unsuffixed => u8,
+ u16_unsuffixed => u16,
+ u32_unsuffixed => u32,
+ u64_unsuffixed => u64,
+ usize_unsuffixed => usize,
+ i8_unsuffixed => i8,
+ i16_unsuffixed => i16,
+ i32_unsuffixed => i32,
+ i64_unsuffixed => i64,
+ isize_unsuffixed => isize,
+ }
+
+ pub fn f32_unsuffixed(f: f32) -> Literal {
+ let mut s = f.to_string();
+ if !s.contains(".") {
+ s.push_str(".0");
+ }
+ Literal::_new(s)
+ }
+
+ pub fn f64_unsuffixed(f: f64) -> Literal {
+ let mut s = f.to_string();
+ if !s.contains(".") {
+ s.push_str(".0");
+ }
+ Literal::_new(s)
+ }
+
+ pub fn string(t: &str) -> Literal {
+ let mut s = t.chars()
+ .flat_map(|c| c.escape_default())
+ .collect::<String>();
+ s.push('"');
+ s.insert(0, '"');
+ Literal::_new(s)
+ }
+
+ pub fn character(t: char) -> Literal {
+ Literal::_new(format!("'{}'", t.escape_default().collect::<String>()))
+ }
+
+ pub fn byte_string(bytes: &[u8]) -> Literal {
+ let mut escaped = "b\"".to_string();
+ for b in bytes {
+ match *b {
+ b'\0' => escaped.push_str(r"\0"),
+ b'\t' => escaped.push_str(r"\t"),
+ b'\n' => escaped.push_str(r"\n"),
+ b'\r' => escaped.push_str(r"\r"),
+ b'"' => escaped.push_str("\\\""),
+ b'\\' => escaped.push_str("\\\\"),
+ b'\x20'...b'\x7E' => escaped.push(*b as char),
+ _ => escaped.push_str(&format!("\\x{:02X}", b)),
+ }
+ }
+ escaped.push('"');
+ Literal::_new(escaped)
+ }
+
+ pub fn span(&self) -> Span {
+ self.span
+ }
+
+ pub fn set_span(&mut self, span: Span) {
+ self.span = span;
+ }
+}
+
+impl fmt::Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ self.text.fmt(f)
+ }
+}
+
+fn token_stream(mut input: Cursor) -> PResult<::TokenStream> {
+ let mut trees = Vec::new();
+ loop {
+ let input_no_ws = skip_whitespace(input);
+ if input_no_ws.rest.len() == 0 {
+ break
+ }
+ if let Ok((a, tokens)) = doc_comment(input_no_ws) {
+ input = a;
+ trees.extend(tokens);
+ continue
+ }
+
+ let (a, tt) = match token_tree(input_no_ws) {
+ Ok(p) => p,
+ Err(_) => break,
+ };
+ trees.push(tt);
+ input = a;
+ }
+ Ok((input, ::TokenStream::_new(TokenStream { inner: trees })))
+}
+
+#[cfg(not(procmacro2_semver_exempt))]
+fn spanned<'a, T>(
+ input: Cursor<'a>,
+ f: fn(Cursor<'a>) -> PResult<'a, T>,
+) -> PResult<'a, (T, ::Span)> {
+ let (a, b) = f(skip_whitespace(input))?;
+ Ok((a, ((b, ::Span::_new(Span { })))))
+}
+
+#[cfg(procmacro2_semver_exempt)]
+fn spanned<'a, T>(
+ input: Cursor<'a>,
+ f: fn(Cursor<'a>) -> PResult<'a, T>,
+) -> PResult<'a, (T, ::Span)> {
+ let input = skip_whitespace(input);
+ let lo = input.off;
+ let (a, b) = f(input)?;
+ let hi = a.off;
+ let span = ::Span::_new(Span { lo: lo, hi: hi });
+ Ok((a, (b, span)))
+}
+
+fn token_tree(input: Cursor) -> PResult<TokenTree> {
+ let (rest, (mut tt, span)) = spanned(input, token_kind)?;
+ tt.set_span(span);
+ Ok((rest, tt))
+}
+
+named!(token_kind -> TokenTree, alt!(
+ map!(group, TokenTree::Group)
+ |
+ map!(literal, TokenTree::Literal) // must be before symbol
+ |
+ symbol
+ |
+ map!(op, TokenTree::Op)
+));
+
+named!(group -> Group, alt!(
+ delimited!(
+ punct!("("),
+ token_stream,
+ punct!(")")
+ ) => { |ts| Group::new(Delimiter::Parenthesis, ts) }
+ |
+ delimited!(
+ punct!("["),
+ token_stream,
+ punct!("]")
+ ) => { |ts| Group::new(Delimiter::Bracket, ts) }
+ |
+ delimited!(
+ punct!("{"),
+ token_stream,
+ punct!("}")
+ ) => { |ts| Group::new(Delimiter::Brace, ts) }
+));
+
+fn symbol(mut input: Cursor) -> PResult<TokenTree> {
+ input = skip_whitespace(input);
+
+ let mut chars = input.char_indices();
+
+ let lifetime = input.starts_with("'");
+ if lifetime {
+ chars.next();
+ }
+
+ let raw = !lifetime && input.starts_with("r#");
+ if raw {
+ chars.next();
+ chars.next();
+ }
+
+ match chars.next() {
+ Some((_, ch)) if UnicodeXID::is_xid_start(ch) || ch == '_' => {}
+ _ => return Err(LexError),
+ }
+
+ let mut end = input.len();
+ for (i, ch) in chars {
+ if !UnicodeXID::is_xid_continue(ch) {
+ end = i;
+ break;
+ }
+ }
+
+ let a = &input.rest[..end];
+ if a == "r#_" || lifetime && a != "'static" && KEYWORDS.contains(&&a[1..]) {
+ Err(LexError)
+ } else if a == "_" {
+ Ok((input.advance(end), Op::new('_', Spacing::Alone).into()))
+ } else {
+ Ok((
+ input.advance(end),
+ ::Term::new(a, ::Span::call_site()).into(),
+ ))
+ }
+}
+
+// From https://github.com/rust-lang/rust/blob/master/src/libsyntax_pos/symbol.rs
+static KEYWORDS: &'static [&'static str] = &[
+ "abstract", "alignof", "as", "become", "box", "break", "const", "continue", "crate", "do",
+ "else", "enum", "extern", "false", "final", "fn", "for", "if", "impl", "in", "let", "loop",
+ "macro", "match", "mod", "move", "mut", "offsetof", "override", "priv", "proc", "pub", "pure",
+ "ref", "return", "self", "Self", "sizeof", "static", "struct", "super", "trait", "true",
+ "type", "typeof", "unsafe", "unsized", "use", "virtual", "where", "while", "yield",
+];
+
+fn literal(input: Cursor) -> PResult<::Literal> {
+ let input_no_ws = skip_whitespace(input);
+
+ match literal_nocapture(input_no_ws) {
+ Ok((a, ())) => {
+ let start = input.len() - input_no_ws.len();
+ let len = input_no_ws.len() - a.len();
+ let end = start + len;
+ Ok((
+ a,
+ ::Literal::_new(Literal::_new(input.rest[start..end].to_string())),
+ ))
+ }
+ Err(LexError) => Err(LexError),
+ }
+}
+
+named!(literal_nocapture -> (), alt!(
+ string
+ |
+ byte_string
+ |
+ byte
+ |
+ character
+ |
+ float
+ |
+ int
+));
+
+named!(string -> (), alt!(
+ quoted_string
+ |
+ preceded!(
+ punct!("r"),
+ raw_string
+ ) => { |_| () }
+));
+
+named!(quoted_string -> (), delimited!(
+ punct!("\""),
+ cooked_string,
+ tag!("\"")
+));
+
+fn cooked_string(input: Cursor) -> PResult<()> {
+ let mut chars = input.char_indices().peekable();
+ while let Some((byte_offset, ch)) = chars.next() {
+ match ch {
+ '"' => {
+ return Ok((input.advance(byte_offset), ()));
+ }
+ '\r' => {
+ if let Some((_, '\n')) = chars.next() {
+ // ...
+ } else {
+ break;
+ }
+ }
+ '\\' => match chars.next() {
+ Some((_, 'x')) => {
+ if !backslash_x_char(&mut chars) {
+ break;
+ }
+ }
+ Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\'))
+ | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {}
+ Some((_, 'u')) => {
+ if !backslash_u(&mut chars) {
+ break;
+ }
+ }
+ Some((_, '\n')) | Some((_, '\r')) => {
+ while let Some(&(_, ch)) = chars.peek() {
+ if ch.is_whitespace() {
+ chars.next();
+ } else {
+ break;
+ }
+ }
+ }
+ _ => break,
+ },
+ _ch => {}
+ }
+ }
+ Err(LexError)
+}
+
+named!(byte_string -> (), alt!(
+ delimited!(
+ punct!("b\""),
+ cooked_byte_string,
+ tag!("\"")
+ ) => { |_| () }
+ |
+ preceded!(
+ punct!("br"),
+ raw_string
+ ) => { |_| () }
+));
+
+fn cooked_byte_string(mut input: Cursor) -> PResult<()> {
+ let mut bytes = input.bytes().enumerate();
+ 'outer: while let Some((offset, b)) = bytes.next() {
+ match b {
+ b'"' => {
+ return Ok((input.advance(offset), ()));
+ }
+ b'\r' => {
+ if let Some((_, b'\n')) = bytes.next() {
+ // ...
+ } else {
+ break;
+ }
+ }
+ b'\\' => match bytes.next() {
+ Some((_, b'x')) => {
+ if !backslash_x_byte(&mut bytes) {
+ break;
+ }
+ }
+ Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
+ | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
+ Some((newline, b'\n')) | Some((newline, b'\r')) => {
+ let rest = input.advance(newline + 1);
+ for (offset, ch) in rest.char_indices() {
+ if !ch.is_whitespace() {
+ input = rest.advance(offset);
+ bytes = input.bytes().enumerate();
+ continue 'outer;
+ }
+ }
+ break;
+ }
+ _ => break,
+ },
+ b if b < 0x80 => {}
+ _ => break,
+ }
+ }
+ Err(LexError)
+}
+
+fn raw_string(input: Cursor) -> PResult<()> {
+ let mut chars = input.char_indices();
+ let mut n = 0;
+ while let Some((byte_offset, ch)) = chars.next() {
+ match ch {
+ '"' => {
+ n = byte_offset;
+ break;
+ }
+ '#' => {}
+ _ => return Err(LexError),
+ }
+ }
+ for (byte_offset, ch) in chars {
+ match ch {
+ '"' if input.advance(byte_offset + 1).starts_with(&input.rest[..n]) => {
+ let rest = input.advance(byte_offset + 1 + n);
+ return Ok((rest, ()));
+ }
+ '\r' => {}
+ _ => {}
+ }
+ }
+ Err(LexError)
+}
+
+named!(byte -> (), do_parse!(
+ punct!("b") >>
+ tag!("'") >>
+ cooked_byte >>
+ tag!("'") >>
+ (())
+));
+
+fn cooked_byte(input: Cursor) -> PResult<()> {
+ let mut bytes = input.bytes().enumerate();
+ let ok = match bytes.next().map(|(_, b)| b) {
+ Some(b'\\') => match bytes.next().map(|(_, b)| b) {
+ Some(b'x') => backslash_x_byte(&mut bytes),
+ Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'')
+ | Some(b'"') => true,
+ _ => false,
+ },
+ b => b.is_some(),
+ };
+ if ok {
+ match bytes.next() {
+ Some((offset, _)) => {
+ if input.chars().as_str().is_char_boundary(offset) {
+ Ok((input.advance(offset), ()))
+ } else {
+ Err(LexError)
+ }
+ }
+ None => Ok((input.advance(input.len()), ())),
+ }
+ } else {
+ Err(LexError)
+ }
+}
+
+named!(character -> (), do_parse!(
+ punct!("'") >>
+ cooked_char >>
+ tag!("'") >>
+ (())
+));
+
+fn cooked_char(input: Cursor) -> PResult<()> {
+ let mut chars = input.char_indices();
+ let ok = match chars.next().map(|(_, ch)| ch) {
+ Some('\\') => match chars.next().map(|(_, ch)| ch) {
+ Some('x') => backslash_x_char(&mut chars),
+ Some('u') => backslash_u(&mut chars),
+ Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => {
+ true
+ }
+ _ => false,
+ },
+ ch => ch.is_some(),
+ };
+ if ok {
+ match chars.next() {
+ Some((idx, _)) => Ok((input.advance(idx), ())),
+ None => Ok((input.advance(input.len()), ())),
+ }
+ } else {
+ Err(LexError)
+ }
+}
+
+macro_rules! next_ch {
+ ($chars:ident @ $pat:pat $(| $rest:pat)*) => {
+ match $chars.next() {
+ Some((_, ch)) => match ch {
+ $pat $(| $rest)* => ch,
+ _ => return false,
+ },
+ None => return false
+ }
+ };
+}
+
+fn backslash_x_char<I>(chars: &mut I) -> bool
+where
+ I: Iterator<Item = (usize, char)>,
+{
+ next_ch!(chars @ '0'...'7');
+ next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F');
+ true
+}
+
+fn backslash_x_byte<I>(chars: &mut I) -> bool
+where
+ I: Iterator<Item = (usize, u8)>,
+{
+ next_ch!(chars @ b'0'...b'9' | b'a'...b'f' | b'A'...b'F');
+ next_ch!(chars @ b'0'...b'9' | b'a'...b'f' | b'A'...b'F');
+ true
+}
+
+fn backslash_u<I>(chars: &mut I) -> bool
+where
+ I: Iterator<Item = (usize, char)>,
+{
+ next_ch!(chars @ '{');
+ next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F');
+ loop {
+ let c = next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F' | '_' | '}');
+ if c == '}' {
+ return true;
+ }
+ }
+}
+
+fn float(input: Cursor) -> PResult<()> {
+ let (rest, ()) = float_digits(input)?;
+ for suffix in &["f32", "f64"] {
+ if rest.starts_with(suffix) {
+ return word_break(rest.advance(suffix.len()));
+ }
+ }
+ word_break(rest)
+}
+
+fn float_digits(input: Cursor) -> PResult<()> {
+ let mut chars = input.chars().peekable();
+ match chars.next() {
+ Some(ch) if ch >= '0' && ch <= '9' => {}
+ _ => return Err(LexError),
+ }
+
+ let mut len = 1;
+ let mut has_dot = false;
+ let mut has_exp = false;
+ while let Some(&ch) = chars.peek() {
+ match ch {
+ '0'...'9' | '_' => {
+ chars.next();
+ len += 1;
+ }
+ '.' => {
+ if has_dot {
+ break;
+ }
+ chars.next();
+ if chars
+ .peek()
+ .map(|&ch| ch == '.' || UnicodeXID::is_xid_start(ch))
+ .unwrap_or(false)
+ {
+ return Err(LexError);
+ }
+ len += 1;
+ has_dot = true;
+ }
+ 'e' | 'E' => {
+ chars.next();
+ len += 1;
+ has_exp = true;
+ break;
+ }
+ _ => break,
+ }
+ }
+
+ let rest = input.advance(len);
+ if !(has_dot || has_exp || rest.starts_with("f32") || rest.starts_with("f64")) {
+ return Err(LexError);
+ }
+
+ if has_exp {
+ let mut has_exp_value = false;
+ while let Some(&ch) = chars.peek() {
+ match ch {
+ '+' | '-' => {
+ if has_exp_value {
+ break;
+ }
+ chars.next();
+ len += 1;
+ }
+ '0'...'9' => {
+ chars.next();
+ len += 1;
+ has_exp_value = true;
+ }
+ '_' => {
+ chars.next();
+ len += 1;
+ }
+ _ => break,
+ }
+ }
+ if !has_exp_value {
+ return Err(LexError);
+ }
+ }
+
+ Ok((input.advance(len), ()))
+}
+
+fn int(input: Cursor) -> PResult<()> {
+ let (rest, ()) = digits(input)?;
+ for suffix in &[
+ "isize", "i8", "i16", "i32", "i64", "i128", "usize", "u8", "u16", "u32", "u64", "u128"
+ ] {
+ if rest.starts_with(suffix) {
+ return word_break(rest.advance(suffix.len()));
+ }
+ }
+ word_break(rest)
+}
+
+fn digits(mut input: Cursor) -> PResult<()> {
+ let base = if input.starts_with("0x") {
+ input = input.advance(2);
+ 16
+ } else if input.starts_with("0o") {
+ input = input.advance(2);
+ 8
+ } else if input.starts_with("0b") {
+ input = input.advance(2);
+ 2
+ } else {
+ 10
+ };
+
+ let mut len = 0;
+ let mut empty = true;
+ for b in input.bytes() {
+ let digit = match b {
+ b'0'...b'9' => (b - b'0') as u64,
+ b'a'...b'f' => 10 + (b - b'a') as u64,
+ b'A'...b'F' => 10 + (b - b'A') as u64,
+ b'_' => {
+ if empty && base == 10 {
+ return Err(LexError);
+ }
+ len += 1;
+ continue;
+ }
+ _ => break,
+ };
+ if digit >= base {
+ return Err(LexError);
+ }
+ len += 1;
+ empty = false;
+ }
+ if empty {
+ Err(LexError)
+ } else {
+ Ok((input.advance(len), ()))
+ }
+}
+
+fn op(input: Cursor) -> PResult<Op> {
+ let input = skip_whitespace(input);
+ match op_char(input) {
+ Ok((rest, ch)) => {
+ let kind = match op_char(rest) {
+ Ok(_) => Spacing::Joint,
+ Err(LexError) => Spacing::Alone,
+ };
+ Ok((rest, Op::new(ch, kind)))
+ }
+ Err(LexError) => Err(LexError),
+ }
+}
+
+fn op_char(input: Cursor) -> PResult<char> {
+ let mut chars = input.chars();
+ let first = match chars.next() {
+ Some(ch) => ch,
+ None => {
+ return Err(LexError);
+ }
+ };
+ let recognized = "~!@#$%^&*-=+|;:,<.>/?";
+ if recognized.contains(first) {
+ Ok((input.advance(first.len_utf8()), first))
+ } else {
+ Err(LexError)
+ }
+}
+
+fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
+ let mut trees = Vec::new();
+ let (rest, ((comment, inner), span)) = spanned(input, doc_comment_contents)?;
+ trees.push(TokenTree::Op(Op::new('#', Spacing::Alone)));
+ if inner {
+ trees.push(Op::new('!', Spacing::Alone).into());
+ }
+ let mut stream = vec![
+ TokenTree::Term(::Term::new("doc", span)),
+ TokenTree::Op(Op::new('=', Spacing::Alone)),
+ TokenTree::Literal(::Literal::string(comment)),
+ ];
+ for tt in stream.iter_mut() {
+ tt.set_span(span);
+ }
+ trees.push(Group::new(Delimiter::Bracket, stream.into_iter().collect()).into());
+ for tt in trees.iter_mut() {
+ tt.set_span(span);
+ }
+ Ok((rest, trees))
+}
+
+named!(doc_comment_contents -> (&str, bool), alt!(
+ do_parse!(
+ punct!("//!") >>
+ s: take_until_newline_or_eof!() >>
+ ((s, true))
+ )
+ |
+ do_parse!(
+ option!(whitespace) >>
+ peek!(tag!("/*!")) >>
+ s: block_comment >>
+ ((s, true))
+ )
+ |
+ do_parse!(
+ punct!("///") >>
+ not!(tag!("/")) >>
+ s: take_until_newline_or_eof!() >>
+ ((s, false))
+ )
+ |
+ do_parse!(
+ option!(whitespace) >>
+ peek!(tuple!(tag!("/**"), not!(tag!("*")))) >>
+ s: block_comment >>
+ ((s, false))
+ )
+));
rename from third_party/rust/proc-macro2-0.3.6/src/strnom.rs
rename to third_party/rust/proc-macro2-0.3.5/src/strnom.rs
rename from third_party/rust/proc-macro2-0.3.6/src/unstable.rs
rename to third_party/rust/proc-macro2-0.3.5/src/unstable.rs
new file mode 100644
--- /dev/null
+++ b/third_party/rust/proc-macro2-0.3.5/tests/test.rs
@@ -0,0 +1,235 @@
+extern crate proc_macro2;
+
+use std::str::{self, FromStr};
+
+use proc_macro2::{Literal, Span, Term, TokenStream, TokenTree};
+
+#[test]
+fn symbols() {
+ assert_eq!(Term::new("foo", Span::call_site()).as_str(), "foo");
+ assert_eq!(Term::new("bar", Span::call_site()).as_str(), "bar");
+}
+
+#[test]
+fn literals() {
+ assert_eq!(Literal::string("foo").to_string(), "\"foo\"");
+ assert_eq!(Literal::string("\"").to_string(), "\"\\\"\"");
+ assert_eq!(Literal::f32_unsuffixed(10.0).to_string(), "10.0");
+}
+
+#[test]
+fn roundtrip() {
+ fn roundtrip(p: &str) {
+ println!("parse: {}", p);
+ let s = p.parse::<TokenStream>().unwrap().to_string();
+ println!("first: {}", s);
+ let s2 = s.to_string().parse::<TokenStream>().unwrap().to_string();
+ assert_eq!(s, s2);
+ }
+ roundtrip("a");
+ roundtrip("<<");
+ roundtrip("<<=");
+ roundtrip(
+ "
+ 1
+ 1.0
+ 1f32
+ 2f64
+ 1usize
+ 4isize
+ 4e10
+ 1_000
+ 1_0i32
+ 8u8
+ 9
+ 0
+ 0xffffffffffffffffffffffffffffffff
+ ",
+ );
+ roundtrip("'a");
+ roundtrip("'static");
+ roundtrip("'\\u{10__FFFF}'");
+ roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\"");
+}
+
+#[test]
+fn fail() {
+ fn fail(p: &str) {
+ if p.parse::<TokenStream>().is_ok() {
+ panic!("should have failed to parse: {}", p);
+ }
+ }
+ fail("1x");
+ fail("1u80");
+ fail("1f320");
+ fail("' static");
+ fail("'mut");
+ fail("r#1");
+ fail("r#_");
+}
+
+#[cfg(procmacro2_semver_exempt)]
+#[test]
+fn span_test() {
+ use proc_macro2::TokenTree;
+
+ fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
+ let ts = p.parse::<TokenStream>().unwrap();
+ check_spans_internal(ts, &mut lines);
+ }
+
+ fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
+ for i in ts {
+ if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
+ *lines = rest;
+
+ let start = i.span().start();
+ assert_eq!(start.line, sline, "sline did not match for {}", i);
+ assert_eq!(start.column, scol, "scol did not match for {}", i);
+
+ let end = i.span().end();
+ assert_eq!(end.line, eline, "eline did not match for {}", i);
+ assert_eq!(end.column, ecol, "ecol did not match for {}", i);
+
+ match i {
+ TokenTree::Group(ref g) => {
+ check_spans_internal(g.stream().clone(), lines);
+ }
+ _ => {}
+ }
+ }
+ }
+ }
+
+ check_spans(
+ "\
+/// This is a document comment
+testing 123
+{
+ testing 234
+}",
+ &[
+ (1, 0, 1, 30), // #
+ (1, 0, 1, 30), // [ ... ]
+ (1, 0, 1, 30), // doc
+ (1, 0, 1, 30), // =
+ (1, 0, 1, 30), // "This is..."
+ (2, 0, 2, 7), // testing
+ (2, 8, 2, 11), // 123
+ (3, 0, 5, 1), // { ... }
+ (4, 2, 4, 9), // testing
+ (4, 10, 4, 13), // 234
+ ],
+ );
+}
+
+#[cfg(procmacro2_semver_exempt)]
+#[cfg(not(feature = "nightly"))]
+#[test]
+fn default_span() {
+ let start = Span::call_site().start();
+ assert_eq!(start.line, 1);
+ assert_eq!(start.column, 0);
+ let end = Span::call_site().end();
+ assert_eq!(end.line, 1);
+ assert_eq!(end.column, 0);
+ let source_file = Span::call_site().source_file();
+ assert_eq!(source_file.path().to_string(), "<unspecified>");
+ assert!(!source_file.is_real());
+}
+
+#[cfg(procmacro2_semver_exempt)]
+#[test]
+fn span_join() {
+ let source1 = "aaa\nbbb"
+ .parse::<TokenStream>()
+ .unwrap()
+ .into_iter()
+ .collect::<Vec<_>>();
+ let source2 = "ccc\nddd"
+ .parse::<TokenStream>()
+ .unwrap()
+ .into_iter()
+ .collect::<Vec<_>>();
+
+ assert!(source1[0].span().source_file() != source2[0].span().source_file());
+ assert_eq!(
+ source1[0].span().source_file(),
+ source1[1].span().source_file()
+ );
+
+ let joined1 = source1[0].span().join(source1[1].span());
+ let joined2 = source1[0].span().join(source2[0].span());
+ assert!(joined1.is_some());
+ assert!(joined2.is_none());
+
+ let start = joined1.unwrap().start();
+ let end = joined1.unwrap().end();
+ assert_eq!(start.line, 1);
+ assert_eq!(start.column, 0);
+ assert_eq!(end.line, 2);
+ assert_eq!(end.column, 3);
+
+ assert_eq!(
+ joined1.unwrap().source_file(),
+ source1[0].span().source_file()
+ );
+}
+
+#[test]
+fn no_panic() {
+ let s = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap();
+ assert!(s.parse::<proc_macro2::TokenStream>().is_err());
+}
+
+#[test]
+fn tricky_doc_comment() {
+ let stream = "/**/".parse::<proc_macro2::TokenStream>().unwrap();
+ let tokens = stream.into_iter().collect::<Vec<_>>();
+ assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
+
+ let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
+ let tokens = stream.into_iter().collect::<Vec<_>>();
+ assert!(tokens.len() == 2, "not length 2 -- {:?}", tokens);
+ match tokens[0] {
+ proc_macro2::TokenTree::Op(ref tt) => assert_eq!(tt.op(), '#'),
+ _ => panic!("wrong token {:?}", tokens[0]),
+ }
+ let mut tokens = match tokens[1] {
+ proc_macro2::TokenTree::Group(ref tt) => {
+ assert_eq!(tt.delimiter(), proc_macro2::Delimiter::Bracket);
+ tt.stream().into_iter()
+ }
+ _ => panic!("wrong token {:?}", tokens[0]),
+ };
+
+ match tokens.next().unwrap() {
+ proc_macro2::TokenTree::Term(ref tt) => assert_eq!(tt.as_str(), "doc"),
+ t => panic!("wrong token {:?}", t),
+ }
+ match tokens.next().unwrap() {
+ proc_macro2::TokenTree::Op(ref tt) => assert_eq!(tt.op(), '='),
+ t => panic!("wrong token {:?}", t),
+ }
+ match tokens.next().unwrap() {
+ proc_macro2::TokenTree::Literal(ref tt) => {
+ assert_eq!(tt.to_string(), "\" doc\"");
+ }
+ t => panic!("wrong token {:?}", t),
+ }
+ assert!(tokens.next().is_none());
+
+ let stream = "//! doc".parse::<proc_macro2::TokenStream>().unwrap();
+ let tokens = stream.into_iter().collect::<Vec<_>>();
+ assert!(tokens.len() == 3, "not length 3 -- {:?}", tokens);
+}
+
+#[test]
+fn raw_identifier() {
+ let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter();
+ match tts.next().unwrap() {
+ TokenTree::Term(raw) => assert_eq!("r#dyn", raw.as_str()),
+ wrong => panic!("wrong token {:?}", wrong),
+ }
+ assert!(tts.next().is_none());
+}
deleted file mode 100644
--- a/third_party/rust/proc-macro2-0.3.6/.cargo-checksum.json
+++ /dev/null
@@ -1,1 +0,0 @@
-{"files":{".travis.yml":"872a0d195dcb1e84f28aa994f301c7139f70360bb42dee3954df5ee965efea15","Cargo.toml":"6ed5d7b9bf8805abd76f9e2a9be99b98e2cb70d9b97980b8aa09b6082d26a94d","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"ce05336717e1e90724491a2f54487c41c752fa2d32396639439f7c6d0f1e6776","src/lib.rs":"e99fedcb4b410c626fe1a3ab722c8b4f98baed2c64c2dff28c4eb62da354f2e2","src/stable.rs":"fd8d86f7542d211030056a7cdcc58b86131180d54f461910a4a067269eee9d4a","src/strnom.rs":"129fe22f0b50e5a64fca82e731c959135381c910e19f3305ef35420e0aadde08","src/unstable.rs":"b43c713ac16d9de0ba0fa1b9bebe390122b4ad60ef2fc75408f721305fdcd46b","tests/test.rs":"a8229931093cd6b39f759c60ef097e59bc43c98f1b0e5eea06ecc8d5d0879853"},"package":"49b6a521dc81b643e9a51e0d1cf05df46d5a2f3c0280ea72bcb68276ba64a118"}
\ No newline at end of file
deleted file mode 100644
--- a/third_party/rust/proc-macro2-0.3.6/Cargo.toml
+++ /dev/null
@@ -1,33 +0,0 @@
-# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
-#
-# When uploading crates to the registry Cargo will automatically
-# "normalize" Cargo.toml files for maximal compatibility
-# with all versions of Cargo and also rewrite `path` dependencies
-# to registry (e.g. crates.io) dependencies
-#
-# If you believe there's an error in this file please file an
-# issue against the rust-lang/cargo repository. If you're
-# editing this file be aware that the upstream Cargo.toml
-# will likely look very different (and much more reasonable)
-
-[package]
-name = "proc-macro2"
-version = "0.3.6"
-authors = ["Alex Crichton <alex@alexcrichton.com>"]
-description = "A stable implementation of the upcoming new `proc_macro` API. Comes with an\noption, off by default, to also reimplement itself in terms of the upstream\nunstable API.\n"
-homepage = "https://github.com/alexcrichton/proc-macro2"
-documentation = "https://docs.rs/proc-macro2"
-readme = "README.md"
-keywords = ["macros"]
-license = "MIT/Apache-2.0"
-repository = "https://github.com/alexcrichton/proc-macro2"
-
-[lib]
-doctest = false
-[dependencies.unicode-xid]
-version = "0.1"
-
-[features]
-default = ["proc-macro"]
-nightly = ["proc-macro"]
-proc-macro = []
deleted file mode 100644
--- a/third_party/rust/proc-macro2-0.3.6/src/lib.rs
+++ /dev/null
@@ -1,590 +0,0 @@
-//! A "shim crate" intended to multiplex the [`proc_macro`] API on to stable
-//! Rust.
-//!
-//! Procedural macros in Rust operate over the upstream
-//! [`proc_macro::TokenStream`][ts] type. This type currently is quite
-//! conservative and exposed no internal implementation details. Nightly
-//! compilers, however, contain a much richer interface. This richer interface
-//! allows fine-grained inspection of the token stream which avoids
-//! stringification/re-lexing and also preserves span information.
-//!
-//! The upcoming APIs added to [`proc_macro`] upstream are the foundation for
-//! productive procedural macros in the ecosystem. To help prepare the ecosystem
-//! for using them this crate serves to both compile on stable and nightly and
-//! mirrors the API-to-be. The intention is that procedural macros which switch
-//! to use this crate will be trivially able to switch to the upstream
-//! `proc_macro` crate once its API stabilizes.
-//!
-//! In the meantime this crate also has a `nightly` Cargo feature which
-//! enables it to reimplement itself with the unstable API of [`proc_macro`].
-//! This'll allow immediate usage of the beneficial upstream API, particularly
-//! around preserving span information.
-//!
-//! [`proc_macro`]: https://doc.rust-lang.org/proc_macro/
-//! [ts]: https://doc.rust-lang.org/proc_macro/struct.TokenStream.html
-
-// Proc-macro2 types in rustdoc of other crates get linked to here.
-#![doc(html_root_url = "https://docs.rs/proc-macro2/0.3.6")]
-#![cfg_attr(feature = "nightly", feature(proc_macro))]
-
-#[cfg(feature = "proc-macro")]
-extern crate proc_macro;
-
-#[cfg(not(feature = "nightly"))]
-extern crate unicode_xid;
-
-use std::fmt;
-use std::iter::FromIterator;
-use std::marker;
-use std::rc::Rc;
-use std::str::FromStr;
-
-#[macro_use]
-#[cfg(not(feature = "nightly"))]
-mod strnom;
-
-#[path = "stable.rs"]
-#[cfg(not(feature = "nightly"))]
-mod imp;
-#[path = "unstable.rs"]
-#[cfg(feature = "nightly")]
-mod imp;
-
-#[derive(Clone)]
-pub struct TokenStream {
- inner: imp::TokenStream,
- _marker: marker::PhantomData<Rc<()>>,
-}
-
-pub struct LexError {
- inner: imp::LexError,
- _marker: marker::PhantomData<Rc<()>>,
-}
-
-impl TokenStream {
- fn _new(inner: imp::TokenStream) -> TokenStream {
- TokenStream {
- inner: inner,
- _marker: marker::PhantomData,
- }
- }
-
- pub fn empty() -> TokenStream {
- TokenStream::_new(imp::TokenStream::empty())
- }
-
- pub fn is_empty(&self) -> bool {
- self.inner.is_empty()
- }
-}
-
-impl FromStr for TokenStream {
- type Err = LexError;
-
- fn from_str(src: &str) -> Result<TokenStream, LexError> {
- let e = src.parse().map_err(|e| LexError {
- inner: e,
- _marker: marker::PhantomData,
- })?;
- Ok(TokenStream::_new(e))
- }
-}
-
-#[cfg(feature = "proc-macro")]
-impl From<proc_macro::TokenStream> for TokenStream {
- fn from(inner: proc_macro::TokenStream) -> TokenStream {
- TokenStream::_new(inner.into())
- }
-}
-
-#[cfg(feature = "proc-macro")]
-impl From<TokenStream> for proc_macro::TokenStream {
- fn from(inner: TokenStream) -> proc_macro::TokenStream {
- inner.inner.into()
- }
-}
-
-impl FromIterator<TokenTree> for TokenStream {
- fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
- TokenStream::_new(streams.into_iter().collect())
- }
-}
-
-impl fmt::Display for TokenStream {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.inner.fmt(f)
- }
-}
-
-impl fmt::Debug for TokenStream {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.inner.fmt(f)
- }
-}
-
-impl fmt::Debug for LexError {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.inner.fmt(f)
- }
-}
-
-// Returned by reference, so we can't easily wrap it.
-#[cfg(procmacro2_semver_exempt)]
-pub use imp::FileName;
-
-#[cfg(procmacro2_semver_exempt)]
-#[derive(Clone, PartialEq, Eq)]
-pub struct SourceFile(imp::SourceFile);
-
-#[cfg(procmacro2_semver_exempt)]
-impl SourceFile {
- /// Get the path to this source file as a string.
- pub fn path(&self) -> &FileName {
- self.0.path()
- }
-
- pub fn is_real(&self) -> bool {
- self.0.is_real()
- }
-}
-
-#[cfg(procmacro2_semver_exempt)]
-impl AsRef<FileName> for SourceFile {
- fn as_ref(&self) -> &FileName {
- self.0.path()
- }
-}
-
-#[cfg(procmacro2_semver_exempt)]
-impl fmt::Debug for SourceFile {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.0.fmt(f)
- }
-}
-
-#[cfg(procmacro2_semver_exempt)]
-pub struct LineColumn {
- pub line: usize,
- pub column: usize,
-}
-
-#[derive(Copy, Clone)]
-pub struct Span {
- inner: imp::Span,
- _marker: marker::PhantomData<Rc<()>>,
-}
-
-impl Span {
- fn _new(inner: imp::Span) -> Span {
- Span {
- inner: inner,
- _marker: marker::PhantomData,
- }
- }
-
- pub fn call_site() -> Span {
- Span::_new(imp::Span::call_site())
- }
-
- #[cfg(procmacro2_semver_exempt)]
- pub fn def_site() -> Span {
- Span::_new(imp::Span::def_site())
- }
-
- /// Creates a new span with the same line/column information as `self` but
- /// that resolves symbols as though it were at `other`.
- #[cfg(procmacro2_semver_exempt)]
- pub fn resolved_at(&self, other: Span) -> Span {
- Span::_new(self.inner.resolved_at(other.inner))
- }
-
- /// Creates a new span with the same name resolution behavior as `self` but
- /// with the line/column information of `other`.
- #[cfg(procmacro2_semver_exempt)]
- pub fn located_at(&self, other: Span) -> Span {
- Span::_new(self.inner.located_at(other.inner))
- }
-
- /// This method is only available when the `"nightly"` feature is enabled.
- #[cfg(all(feature = "nightly", feature = "proc-macro"))]
- pub fn unstable(self) -> proc_macro::Span {
- self.inner.unstable()
- }
-
- #[cfg(procmacro2_semver_exempt)]
- pub fn source_file(&self) -> SourceFile {
- SourceFile(self.inner.source_file())
- }
-
- #[cfg(procmacro2_semver_exempt)]
- pub fn start(&self) -> LineColumn {
- let imp::LineColumn { line, column } = self.inner.start();
- LineColumn {
- line: line,
- column: column,
- }
- }
-
- #[cfg(procmacro2_semver_exempt)]
- pub fn end(&self) -> LineColumn {
- let imp::LineColumn { line, column } = self.inner.end();
- LineColumn {
- line: line,
- column: column,
- }
- }
-
- #[cfg(procmacro2_semver_exempt)]
- pub fn join(&self, other: Span) -> Option<Span> {
- self.inner.join(other.inner).map(Span::_new)
- }
-
- #[cfg(procmacro2_semver_exempt)]
- pub fn eq(&self, other: &Span) -> bool {
- self.inner.eq(&other.inner)
- }
-}
-
-impl fmt::Debug for Span {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.inner.fmt(f)
- }
-}
-
-#[derive(Clone, Debug)]
-pub enum TokenTree {
- Group(Group),
- Term(Term),
- Op(Op),
- Literal(Literal),
-}
-
-impl TokenTree {
- pub fn span(&self) -> Span {
- match *self {
- TokenTree::Group(ref t) => t.span(),
- TokenTree::Term(ref t) => t.span(),
- TokenTree::Op(ref t) => t.span(),
- TokenTree::Literal(ref t) => t.span(),
- }
- }
-
- pub fn set_span(&mut self, span: Span) {
- match *self {
- TokenTree::Group(ref mut t) => t.set_span(span),
- TokenTree::Term(ref mut t) => t.set_span(span),
- TokenTree::Op(ref mut t) => t.set_span(span),
- TokenTree::Literal(ref mut t) => t.set_span(span),
- }
- }
-}
-
-impl From<Group> for TokenTree {
- fn from(g: Group) -> TokenTree {
- TokenTree::Group(g)
- }
-}
-
-impl From<Term> for TokenTree {
- fn from(g: Term) -> TokenTree {
- TokenTree::Term(g)
- }
-}
-
-impl From<Op> for TokenTree {
- fn from(g: Op) -> TokenTree {
- TokenTree::Op(g)
- }
-}
-
-impl From<Literal> for TokenTree {
- fn from(g: Literal) -> TokenTree {
- TokenTree::Literal(g)
- }
-}
-
-impl fmt::Display for TokenTree {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- match *self {
- TokenTree::Group(ref t) => t.fmt(f),
- TokenTree::Term(ref t) => t.fmt(f),
- TokenTree::Op(ref t) => t.fmt(f),
- TokenTree::Literal(ref t) => t.fmt(f),
- }
- }
-}
-
-#[derive(Clone, Debug)]
-pub struct Group {
- delimiter: Delimiter,
- stream: TokenStream,
- span: Span,
-}
-
-#[derive(Copy, Clone, Debug, Eq, PartialEq)]
-pub enum Delimiter {
- Parenthesis,
- Brace,
- Bracket,
- None,
-}
-
-impl Group {
- pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
- Group {
- delimiter: delimiter,
- stream: stream,
- span: Span::call_site(),
- }
- }
-
- pub fn delimiter(&self) -> Delimiter {
- self.delimiter
- }
-
- pub fn stream(&self) -> TokenStream {
- self.stream.clone()
- }
-
- pub fn span(&self) -> Span {
- self.span
- }
-
- pub fn set_span(&mut self, span: Span) {
- self.span = span;
- }
-}
-
-impl fmt::Display for Group {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.stream.fmt(f)
- }
-}
-
-#[derive(Copy, Clone, Debug)]
-pub struct Op {
- op: char,
- spacing: Spacing,
- span: Span,
-}
-
-#[derive(Copy, Clone, Debug, Eq, PartialEq)]
-pub enum Spacing {
- Alone,
- Joint,
-}
-
-impl Op {
- pub fn new(op: char, spacing: Spacing) -> Op {
- Op {
- op: op,
- spacing: spacing,
- span: Span::call_site(),
- }
- }
-
- pub fn op(&self) -> char {
- self.op
- }
-
- pub fn spacing(&self) -> Spacing {
- self.spacing
- }
-
- pub fn span(&self) -> Span {
- self.span
- }
-
- pub fn set_span(&mut self, span: Span) {
- self.span = span;
- }
-}
-
-impl fmt::Display for Op {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.op.fmt(f)
- }
-}
-
-#[derive(Copy, Clone)]
-pub struct Term {
- inner: imp::Term,
- _marker: marker::PhantomData<Rc<()>>,
-}
-
-impl Term {
- fn _new(inner: imp::Term) -> Term {
- Term {
- inner: inner,
- _marker: marker::PhantomData,
- }
- }
-
- pub fn new(string: &str, span: Span) -> Term {
- Term::_new(imp::Term::new(string, span.inner))
- }
-
- pub fn as_str(&self) -> &str {
- self.inner.as_str()
- }
-
- pub fn span(&self) -> Span {
- Span::_new(self.inner.span())
- }
-
- pub fn set_span(&mut self, span: Span) {
- self.inner.set_span(span.inner);
- }
-}
-
-impl fmt::Display for Term {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.as_str().fmt(f)
- }
-}
-
-impl fmt::Debug for Term {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.inner.fmt(f)
- }
-}
-
-#[derive(Clone)]
-pub struct Literal {
- inner: imp::Literal,
- _marker: marker::PhantomData<Rc<()>>,
-}
-
-macro_rules! int_literals {
- ($($name:ident => $kind:ident,)*) => ($(
- pub fn $name(n: $kind) -> Literal {
- Literal::_new(imp::Literal::$name(n))
- }
- )*)
-}
-
-impl Literal {
- fn _new(inner: imp::Literal) -> Literal {
- Literal {
- inner: inner,
- _marker: marker::PhantomData,
- }
- }
-
- int_literals! {
- u8_suffixed => u8,
- u16_suffixed => u16,
- u32_suffixed => u32,
- u64_suffixed => u64,
- usize_suffixed => usize,
- i8_suffixed => i8,
- i16_suffixed => i16,
- i32_suffixed => i32,
- i64_suffixed => i64,
- isize_suffixed => isize,
-
- u8_unsuffixed => u8,
- u16_unsuffixed => u16,
- u32_unsuffixed => u32,
- u64_unsuffixed => u64,
- usize_unsuffixed => usize,
- i8_unsuffixed => i8,
- i16_unsuffixed => i16,
- i32_unsuffixed => i32,
- i64_unsuffixed => i64,
- isize_unsuffixed => isize,
- }
-
- pub fn f64_unsuffixed(f: f64) -> Literal {
- assert!(f.is_finite());
- Literal::_new(imp::Literal::f64_unsuffixed(f))
- }
-
- pub fn f64_suffixed(f: f64) -> Literal {
- assert!(f.is_finite());
- Literal::_new(imp::Literal::f64_suffixed(f))
- }
-
- pub fn f32_unsuffixed(f: f32) -> Literal {
- assert!(f.is_finite());
- Literal::_new(imp::Literal::f32_unsuffixed(f))
- }
-
- pub fn f32_suffixed(f: f32) -> Literal {
- assert!(f.is_finite());
- Literal::_new(imp::Literal::f32_suffixed(f))
- }
-
- pub fn string(string: &str) -> Literal {
- Literal::_new(imp::Literal::string(string))
- }
-
- pub fn character(ch: char) -> Literal {
- Literal::_new(imp::Literal::character(ch))
- }
-
- pub fn byte_string(s: &[u8]) -> Literal {
- Literal::_new(imp::Literal::byte_string(s))
- }
-
- pub fn span(&self) -> Span {
- Span::_new(self.inner.span())
- }
-
- pub fn set_span(&mut self, span: Span) {
- self.inner.set_span(span.inner);
- }
-}
-
-impl fmt::Debug for Literal {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.inner.fmt(f)
- }
-}
-
-impl fmt::Display for Literal {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.inner.fmt(f)
- }
-}
-
-pub mod token_stream {
- use std::fmt;
- use std::marker;
- use std::rc::Rc;
-
- pub use TokenStream;
- use TokenTree;
- use imp;
-
- pub struct IntoIter {
- inner: imp::TokenTreeIter,
- _marker: marker::PhantomData<Rc<()>>,
- }
-
- impl Iterator for IntoIter {
- type Item = TokenTree;
-
- fn next(&mut self) -> Option<TokenTree> {
- self.inner.next()
- }
- }
-
- impl fmt::Debug for IntoIter {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.inner.fmt(f)
- }
- }
-
- impl IntoIterator for TokenStream {
- type Item = TokenTree;
- type IntoIter = IntoIter;
-
- fn into_iter(self) -> IntoIter {
- IntoIter {
- inner: self.inner.into_iter(),
- _marker: marker::PhantomData,
- }
- }
- }
-}
deleted file mode 100644
--- a/third_party/rust/proc-macro2-0.3.6/src/stable.rs
+++ /dev/null
@@ -1,1265 +0,0 @@
-#![cfg_attr(not(procmacro2_semver_exempt), allow(dead_code))]
-
-use std::borrow::Borrow;
-use std::cell::RefCell;
-#[cfg(procmacro2_semver_exempt)]
-use std::cmp;
-use std::collections::HashMap;
-use std::fmt;
-use std::iter;
-use std::rc::Rc;
-use std::str::FromStr;
-use std::vec;
-
-use strnom::{block_comment, skip_whitespace, whitespace, word_break, Cursor, PResult};
-use unicode_xid::UnicodeXID;
-
-use {Delimiter, Group, Op, Spacing, TokenTree};
-
-#[derive(Clone, Debug)]
-pub struct TokenStream {
- inner: Vec<TokenTree>,
-}
-
-#[derive(Debug)]
-pub struct LexError;
-
-impl TokenStream {
- pub fn empty() -> TokenStream {
- TokenStream { inner: Vec::new() }
- }
-
- pub fn is_empty(&self) -> bool {
- self.inner.len() == 0
- }
-}
-
-#[cfg(procmacro2_semver_exempt)]
-fn get_cursor(src: &str) -> Cursor {
- // Create a dummy file & add it to the codemap
- CODEMAP.with(|cm| {
- let mut cm = cm.borrow_mut();
- let name = format!("<parsed string {}>", cm.files.len());
- let span = cm.add_file(&name, src);
- Cursor {
- rest: src,
- off: span.lo,
- }
- })
-}
-
-#[cfg(not(procmacro2_semver_exempt))]
-fn get_cursor(src: &str) -> Cursor {
- Cursor { rest: src }
-}
-
-impl FromStr for TokenStream {
- type Err = LexError;
-
- fn from_str(src: &str) -> Result<TokenStream, LexError> {
- // Create a dummy file & add it to the codemap
- let cursor = get_cursor(src);
-
- match token_stream(cursor) {
- Ok((input, output)) => {
- if skip_whitespace(input).len() != 0 {
- Err(LexError)
- } else {
- Ok(output.inner)
- }
- }
- Err(LexError) => Err(LexError),
- }
- }
-}
-
-impl fmt::Display for TokenStream {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- let mut joint = false;
- for (i, tt) in self.inner.iter().enumerate() {
- if i != 0 && !joint {
- write!(f, " ")?;
- }
- joint = false;
- match *tt {
- TokenTree::Group(ref tt) => {
- let (start, end) = match tt.delimiter() {
- Delimiter::Parenthesis => ("(", ")"),
- Delimiter::Brace => ("{", "}"),
- Delimiter::Bracket => ("[", "]"),
- Delimiter::None => ("", ""),
- };
- if tt.stream().inner.inner.len() == 0 {
- write!(f, "{} {}", start, end)?
- } else {
- write!(f, "{} {} {}", start, tt.stream(), end)?
- }
- }
- TokenTree::Term(ref tt) => write!(f, "{}", tt.as_str())?,
- TokenTree::Op(ref tt) => {
- write!(f, "{}", tt.op())?;
- match tt.spacing() {
- Spacing::Alone => {}
- Spacing::Joint => joint = true,
- }
- }
- TokenTree::Literal(ref tt) => write!(f, "{}", tt)?,
- }
- }
-
- Ok(())
- }
-}
-
-#[cfg(feature = "proc-macro")]
-impl From<::proc_macro::TokenStream> for TokenStream {
- fn from(inner: ::proc_macro::TokenStream) -> TokenStream {
- inner
- .to_string()
- .parse()
- .expect("compiler token stream parse failed")
- }
-}
-
-#[cfg(feature = "proc-macro")]
-impl From<TokenStream> for ::proc_macro::TokenStream {
- fn from(inner: TokenStream) -> ::proc_macro::TokenStream {
- inner
- .to_string()
- .parse()
- .expect("failed to parse to compiler tokens")
- }
-}
-
-impl From<TokenTree> for TokenStream {
- fn from(tree: TokenTree) -> TokenStream {
- TokenStream { inner: vec![tree] }
- }
-}
-
-impl iter::FromIterator<TokenTree> for TokenStream {
- fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
- let mut v = Vec::new();
-
- for token in streams.into_iter() {
- v.push(token);
- }
-
- TokenStream { inner: v }
- }
-}
-
-pub type TokenTreeIter = vec::IntoIter<TokenTree>;
-
-impl IntoIterator for TokenStream {
- type Item = TokenTree;
- type IntoIter = TokenTreeIter;
-
- fn into_iter(self) -> TokenTreeIter {
- self.inner.into_iter()
- }
-}
-
-#[cfg(procmacro2_semver_exempt)]
-#[derive(Clone, PartialEq, Eq, Debug)]
-pub struct FileName(String);
-
-#[cfg(procmacro2_semver_exempt)]
-impl fmt::Display for FileName {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.0.fmt(f)
- }
-}
-
-#[cfg(procmacro2_semver_exempt)]
-#[derive(Clone, PartialEq, Eq)]
-pub struct SourceFile {
- name: FileName,
-}
-
-#[cfg(procmacro2_semver_exempt)]
-impl SourceFile {
- /// Get the path to this source file as a string.
- pub fn path(&self) -> &FileName {
- &self.name
- }
-
- pub fn is_real(&self) -> bool {
- // XXX(nika): Support real files in the future?
- false
- }
-}
-
-#[cfg(procmacro2_semver_exempt)]
-impl AsRef<FileName> for SourceFile {
- fn as_ref(&self) -> &FileName {
- self.path()
- }
-}
-
-#[cfg(procmacro2_semver_exempt)]
-impl fmt::Debug for SourceFile {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_struct("SourceFile")
- .field("path", &self.path())
- .field("is_real", &self.is_real())
- .finish()
- }
-}
-
-#[cfg(procmacro2_semver_exempt)]
-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
-pub struct LineColumn {
- pub line: usize,
- pub column: usize,
-}
-
-#[cfg(procmacro2_semver_exempt)]
-thread_local! {
- static CODEMAP: RefCell<Codemap> = RefCell::new(Codemap {
- // NOTE: We start with a single dummy file which all call_site() and
- // def_site() spans reference.
- files: vec![FileInfo {
- name: "<unspecified>".to_owned(),
- span: Span { lo: 0, hi: 0 },
- lines: vec![0],
- }],
- });
-}
-
-#[cfg(procmacro2_semver_exempt)]
-struct FileInfo {
- name: String,
- span: Span,
- lines: Vec<usize>,
-}
-
-#[cfg(procmacro2_semver_exempt)]
-impl FileInfo {
- fn offset_line_column(&self, offset: usize) -> LineColumn {
- assert!(self.span_within(Span {
- lo: offset as u32,
- hi: offset as u32
- }));
- let offset = offset - self.span.lo as usize;
- match self.lines.binary_search(&offset) {
- Ok(found) => LineColumn {
- line: found + 1,
- column: 0,
- },
- Err(idx) => LineColumn {
- line: idx,
- column: offset - self.lines[idx - 1],
- },
- }
- }
-
- fn span_within(&self, span: Span) -> bool {
- span.lo >= self.span.lo && span.hi <= self.span.hi
- }
-}
-
-/// Computesthe offsets of each line in the given source string.
-#[cfg(procmacro2_semver_exempt)]
-fn lines_offsets(s: &str) -> Vec<usize> {
- let mut lines = vec![0];
- let mut prev = 0;
- while let Some(len) = s[prev..].find('\n') {
- prev += len + 1;
- lines.push(prev);
- }
- lines
-}
-
-#[cfg(procmacro2_semver_exempt)]
-struct Codemap {
- files: Vec<FileInfo>,
-}
-
-#[cfg(procmacro2_semver_exempt)]
-impl Codemap {
- fn next_start_pos(&self) -> u32 {
- // Add 1 so there's always space between files.
- //
- // We'll always have at least 1 file, as we initialize our files list
- // with a dummy file.
- self.files.last().unwrap().span.hi + 1
- }
-
- fn add_file(&mut self, name: &str, src: &str) -> Span {
- let lines = lines_offsets(src);
- let lo = self.next_start_pos();
- // XXX(nika): Shouild we bother doing a checked cast or checked add here?
- let span = Span {
- lo: lo,
- hi: lo + (src.len() as u32),
- };
-
- self.files.push(FileInfo {
- name: name.to_owned(),
- span: span,
- lines: lines,
- });
-
- span
- }
-
- fn fileinfo(&self, span: Span) -> &FileInfo {
- for file in &self.files {
- if file.span_within(span) {
- return file;
- }
- }
- panic!("Invalid span with no related FileInfo!");
- }
-}
-
-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
-pub struct Span {
- #[cfg(procmacro2_semver_exempt)]
- lo: u32,
- #[cfg(procmacro2_semver_exempt)]
- hi: u32,
-}
-
-impl Span {
- #[cfg(not(procmacro2_semver_exempt))]
- pub fn call_site() -> Span {
- Span {}
- }
-
- #[cfg(procmacro2_semver_exempt)]
- pub fn call_site() -> Span {
- Span { lo: 0, hi: 0 }
- }
-
- pub fn def_site() -> Span {
- Span::call_site()
- }
-
- pub fn resolved_at(&self, _other: Span) -> Span {
- // Stable spans consist only of line/column information, so
- // `resolved_at` and `located_at` only select which span the
- // caller wants line/column information from.
- *self
- }
-
- pub fn located_at(&self, other: Span) -> Span {
- other
- }
-
- #[cfg(procmacro2_semver_exempt)]
- pub fn source_file(&self) -> SourceFile {
- CODEMAP.with(|cm| {
- let cm = cm.borrow();
- let fi = cm.fileinfo(*self);
- SourceFile {
- name: FileName(fi.name.clone()),
- }
- })
- }
-
- #[cfg(procmacro2_semver_exempt)]
- pub fn start(&self) -> LineColumn {
- CODEMAP.with(|cm| {
- let cm = cm.borrow();
- let fi = cm.fileinfo(*self);
- fi.offset_line_column(self.lo as usize)
- })
- }
-
- #[cfg(procmacro2_semver_exempt)]
- pub fn end(&self) -> LineColumn {
- CODEMAP.with(|cm| {
- let cm = cm.borrow();
- let fi = cm.fileinfo(*self);
- fi.offset_line_column(self.hi as usize)
- })
- }
-
- #[cfg(procmacro2_semver_exempt)]
- pub fn join(&self, other: Span) -> Option<Span> {
- CODEMAP.with(|cm| {
- let cm = cm.borrow();
- // If `other` is not within the same FileInfo as us, return None.
- if !cm.fileinfo(*self).span_within(other) {
- return None;
- }
- Some(Span {
- lo: cmp::min(self.lo, other.lo),
- hi: cmp::max(self.hi, other.hi),
- })
- })
- }
-}
-
-#[derive(Copy, Clone)]
-pub struct Term {
- intern: usize,
- span: Span,
-}
-
-thread_local!(static SYMBOLS: RefCell<Interner> = RefCell::new(Interner::new()));
-
-impl Term {
- pub fn new(string: &str, span: Span) -> Term {
- validate_term(string);
-
- Term {
- intern: SYMBOLS.with(|s| s.borrow_mut().intern(string)),
- span: span,
- }
- }
-
- pub fn as_str(&self) -> &str {
- SYMBOLS.with(|interner| {
- let interner = interner.borrow();
- let s = interner.get(self.intern);
- unsafe { &*(s as *const str) }
- })
- }
-
- pub fn span(&self) -> Span {
- self.span
- }
-
- pub fn set_span(&mut self, span: Span) {
- self.span = span;
- }
-}
-
-fn validate_term(string: &str) {
- let validate = if string.starts_with('\'') {
- &string[1..]
- } else if string.starts_with("r#") {
- &string[2..]
- } else {
- string
- };
-
- if validate.is_empty() {
- panic!("Term is not allowed to be empty; use Option<Term>");
- }
-
- if validate.bytes().all(|digit| digit >= b'0' && digit <= b'9') {
- panic!("Term cannot be a number; use Literal instead");
- }
-
- fn xid_ok(string: &str) -> bool {
- let mut chars = string.chars();
- let first = chars.next().unwrap();
- if !(UnicodeXID::is_xid_start(first) || first == '_') {
- return false;
- }
- for ch in chars {
- if !UnicodeXID::is_xid_continue(ch) {
- return false;
- }
- }
- true
- }
-
- if !xid_ok(validate) {
- panic!("{:?} is not a valid Term", string);
- }
-}
-
-impl fmt::Debug for Term {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_tuple("Term").field(&self.as_str()).finish()
- }
-}
-
-struct Interner {
- string_to_index: HashMap<MyRc, usize>,
- index_to_string: Vec<Rc<String>>,
-}
-
-#[derive(Hash, Eq, PartialEq)]
-struct MyRc(Rc<String>);
-
-impl Borrow<str> for MyRc {
- fn borrow(&self) -> &str {
- &self.0
- }
-}
-
-impl Interner {
- fn new() -> Interner {
- Interner {
- string_to_index: HashMap::new(),
- index_to_string: Vec::new(),
- }
- }
-
- fn intern(&mut self, s: &str) -> usize {
- if let Some(&idx) = self.string_to_index.get(s) {
- return idx;
- }
- let s = Rc::new(s.to_string());
- self.index_to_string.push(s.clone());
- self.string_to_index
- .insert(MyRc(s), self.index_to_string.len() - 1);
- self.index_to_string.len() - 1
- }
-
- fn get(&self, idx: usize) -> &str {
- &self.index_to_string[idx]
- }
-}
-
-#[derive(Clone, Debug)]
-pub struct Literal {
- text: String,
- span: Span,
-}
-
-macro_rules! suffixed_numbers {
- ($($name:ident => $kind:ident,)*) => ($(
- pub fn $name(n: $kind) -> Literal {
- Literal::_new(format!(concat!("{}", stringify!($kind)), n))
- }
- )*)
-}
-
-macro_rules! unsuffixed_numbers {
- ($($name:ident => $kind:ident,)*) => ($(
- pub fn $name(n: $kind) -> Literal {
- Literal::_new(n.to_string())
- }
- )*)
-}
-
-impl Literal {
- fn _new(text: String) -> Literal {
- Literal {
- text: text,
- span: Span::call_site(),
- }
- }
-
- suffixed_numbers! {
- u8_suffixed => u8,
- u16_suffixed => u16,
- u32_suffixed => u32,
- u64_suffixed => u64,
- usize_suffixed => usize,
- i8_suffixed => i8,
- i16_suffixed => i16,
- i32_suffixed => i32,
- i64_suffixed => i64,
- isize_suffixed => isize,
-
- f32_suffixed => f32,
- f64_suffixed => f64,
- }
-
- unsuffixed_numbers! {
- u8_unsuffixed => u8,
- u16_unsuffixed => u16,
- u32_unsuffixed => u32,
- u64_unsuffixed => u64,
- usize_unsuffixed => usize,
- i8_unsuffixed => i8,
- i16_unsuffixed => i16,
- i32_unsuffixed => i32,
- i64_unsuffixed => i64,
- isize_unsuffixed => isize,
- }
-
- pub fn f32_unsuffixed(f: f32) -> Literal {
- let mut s = f.to_string();
- if !s.contains(".") {
- s.push_str(".0");
- }
- Literal::_new(s)
- }
-
- pub fn f64_unsuffixed(f: f64) -> Literal {
- let mut s = f.to_string();
- if !s.contains(".") {
- s.push_str(".0");
- }
- Literal::_new(s)
- }
-
- pub fn string(t: &str) -> Literal {
- let mut s = t.chars()
- .flat_map(|c| c.escape_default())
- .collect::<String>();
- s.push('"');
- s.insert(0, '"');
- Literal::_new(s)
- }
-
- pub fn character(t: char) -> Literal {
- Literal::_new(format!("'{}'", t.escape_default().collect::<String>()))
- }
-
- pub fn byte_string(bytes: &[u8]) -> Literal {
- let mut escaped = "b\"".to_string();
- for b in bytes {
- match *b {
- b'\0' => escaped.push_str(r"\0"),
- b'\t' => escaped.push_str(r"\t"),
- b'\n' => escaped.push_str(r"\n"),
- b'\r' => escaped.push_str(r"\r"),
- b'"' => escaped.push_str("\\\""),
- b'\\' => escaped.push_str("\\\\"),
- b'\x20'...b'\x7E' => escaped.push(*b as char),
- _ => escaped.push_str(&format!("\\x{:02X}", b)),
- }
- }
- escaped.push('"');
- Literal::_new(escaped)
- }
-
- pub fn span(&self) -> Span {
- self.span
- }
-
- pub fn set_span(&mut self, span: Span) {
- self.span = span;
- }
-}
-
-impl fmt::Display for Literal {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.text.fmt(f)
- }
-}
-
-fn token_stream(mut input: Cursor) -> PResult<::TokenStream> {
- let mut trees = Vec::new();
- loop {
- let input_no_ws = skip_whitespace(input);
- if input_no_ws.rest.len() == 0 {
- break
- }
- if let Ok((a, tokens)) = doc_comment(input_no_ws) {
- input = a;
- trees.extend(tokens);
- continue
- }
-
- let (a, tt) = match token_tree(input_no_ws) {
- Ok(p) => p,
- Err(_) => break,
- };
- trees.push(tt);
- input = a;
- }
- Ok((input, ::TokenStream::_new(TokenStream { inner: trees })))
-}
-
-#[cfg(not(procmacro2_semver_exempt))]
-fn spanned<'a, T>(
- input: Cursor<'a>,
- f: fn(Cursor<'a>) -> PResult<'a, T>,
-) -> PResult<'a, (T, ::Span)> {
- let (a, b) = f(skip_whitespace(input))?;
- Ok((a, ((b, ::Span::_new(Span { })))))
-}
-
-#[cfg(procmacro2_semver_exempt)]
-fn spanned<'a, T>(
- input: Cursor<'a>,
- f: fn(Cursor<'a>) -> PResult<'a, T>,
-) -> PResult<'a, (T, ::Span)> {
- let input = skip_whitespace(input);
- let lo = input.off;
- let (a, b) = f(input)?;
- let hi = a.off;
- let span = ::Span::_new(Span { lo: lo, hi: hi });
- Ok((a, (b, span)))
-}
-
-fn token_tree(input: Cursor) -> PResult<TokenTree> {
- let (rest, (mut tt, span)) = spanned(input, token_kind)?;
- tt.set_span(span);
- Ok((rest, tt))
-}
-
-named!(token_kind -> TokenTree, alt!(
- map!(group, TokenTree::Group)
- |
- map!(literal, TokenTree::Literal) // must be before symbol
- |
- symbol
- |
- map!(op, TokenTree::Op)
-));
-
-named!(group -> Group, alt!(
- delimited!(
- punct!("("),
- token_stream,
- punct!(")")
- ) => { |ts| Group::new(Delimiter::Parenthesis, ts) }
- |
- delimited!(
- punct!("["),
- token_stream,
- punct!("]")
- ) => { |ts| Group::new(Delimiter::Bracket, ts) }
- |
- delimited!(
- punct!("{"),
- token_stream,
- punct!("}")
- ) => { |ts| Group::new(Delimiter::Brace, ts) }
-));
-
-fn symbol(mut input: Cursor) -> PResult<TokenTree> {
- input = skip_whitespace(input);
-
- let mut chars = input.char_indices();
-
- let lifetime = input.starts_with("'");
- if lifetime {
- chars.next();
- }
-
- let raw = !lifetime && input.starts_with("r#");
- if raw {
- chars.next();
- chars.next();
- }
-
- match chars.next() {
- Some((_, ch)) if UnicodeXID::is_xid_start(ch) || ch == '_' => {}
- _ => return Err(LexError),
- }
-
- let mut end = input.len();
- for (i, ch) in chars {
- if !UnicodeXID::is_xid_continue(ch) {
- end = i;
- break;
- }
- }
-
- let a = &input.rest[..end];
- if a == "r#_" || lifetime && a != "'static" && KEYWORDS.contains(&&a[1..]) {
- Err(LexError)
- } else if a == "_" {
- Ok((input.advance(end), Op::new('_', Spacing::Alone).into()))
- } else {
- Ok((
- input.advance(end),
- ::Term::new(a, ::Span::call_site()).into(),
- ))
- }
-}
-
-// From https://github.com/rust-lang/rust/blob/master/src/libsyntax_pos/symbol.rs
-static KEYWORDS: &'static [&'static str] = &[
- "abstract", "alignof", "as", "become", "box", "break", "const", "continue", "crate", "do",
- "else", "enum", "extern", "false", "final", "fn", "for", "if", "impl", "in", "let", "loop",
- "macro", "match", "mod", "move", "mut", "offsetof", "override", "priv", "proc", "pub", "pure",
- "ref", "return", "self", "Self", "sizeof", "static", "struct", "super", "trait", "true",
- "type", "typeof", "unsafe", "unsized", "use", "virtual", "where", "while", "yield",
-];
-
-fn literal(input: Cursor) -> PResult<::Literal> {
- let input_no_ws = skip_whitespace(input);
-
- match literal_nocapture(input_no_ws) {
- Ok((a, ())) => {
- let start = input.len() - input_no_ws.len();
- let len = input_no_ws.len() - a.len();
- let end = start + len;
- Ok((
- a,
- ::Literal::_new(Literal::_new(input.rest[start..end].to_string())),
- ))
- }
- Err(LexError) => Err(LexError),
- }
-}
-
-named!(literal_nocapture -> (), alt!(
- string
- |
- byte_string
- |
- byte
- |
- character
- |
- float
- |
- int
-));
-
-named!(string -> (), alt!(
- quoted_string
- |
- preceded!(
- punct!("r"),
- raw_string
- ) => { |_| () }
-));
-
-named!(quoted_string -> (), delimited!(
- punct!("\""),
- cooked_string,
- tag!("\"")
-));
-
-fn cooked_string(input: Cursor) -> PResult<()> {
- let mut chars = input.char_indices().peekable();
- while let Some((byte_offset, ch)) = chars.next() {
- match ch {
- '"' => {
- return Ok((input.advance(byte_offset), ()));
- }
- '\r' => {
- if let Some((_, '\n')) = chars.next() {
- // ...
- } else {
- break;
- }
- }
- '\\' => match chars.next() {
- Some((_, 'x')) => {
- if !backslash_x_char(&mut chars) {
- break;
- }
- }
- Some((_, 'n')) | Some((_, 'r')) | Some((_, 't')) | Some((_, '\\'))
- | Some((_, '\'')) | Some((_, '"')) | Some((_, '0')) => {}
- Some((_, 'u')) => {
- if !backslash_u(&mut chars) {
- break;
- }
- }
- Some((_, '\n')) | Some((_, '\r')) => {
- while let Some(&(_, ch)) = chars.peek() {
- if ch.is_whitespace() {
- chars.next();
- } else {
- break;
- }
- }
- }
- _ => break,
- },
- _ch => {}
- }
- }
- Err(LexError)
-}
-
-named!(byte_string -> (), alt!(
- delimited!(
- punct!("b\""),
- cooked_byte_string,
- tag!("\"")
- ) => { |_| () }
- |
- preceded!(
- punct!("br"),
- raw_string
- ) => { |_| () }
-));
-
-fn cooked_byte_string(mut input: Cursor) -> PResult<()> {
- let mut bytes = input.bytes().enumerate();
- 'outer: while let Some((offset, b)) = bytes.next() {
- match b {
- b'"' => {
- return Ok((input.advance(offset), ()));
- }
- b'\r' => {
- if let Some((_, b'\n')) = bytes.next() {
- // ...
- } else {
- break;
- }
- }
- b'\\' => match bytes.next() {
- Some((_, b'x')) => {
- if !backslash_x_byte(&mut bytes) {
- break;
- }
- }
- Some((_, b'n')) | Some((_, b'r')) | Some((_, b't')) | Some((_, b'\\'))
- | Some((_, b'0')) | Some((_, b'\'')) | Some((_, b'"')) => {}
- Some((newline, b'\n')) | Some((newline, b'\r')) => {
- let rest = input.advance(newline + 1);
- for (offset, ch) in rest.char_indices() {
- if !ch.is_whitespace() {
- input = rest.advance(offset);
- bytes = input.bytes().enumerate();
- continue 'outer;
- }
- }
- break;
- }
- _ => break,
- },
- b if b < 0x80 => {}
- _ => break,
- }
- }
- Err(LexError)
-}
-
-fn raw_string(input: Cursor) -> PResult<()> {
- let mut chars = input.char_indices();
- let mut n = 0;
- while let Some((byte_offset, ch)) = chars.next() {
- match ch {
- '"' => {
- n = byte_offset;
- break;
- }
- '#' => {}
- _ => return Err(LexError),
- }
- }
- for (byte_offset, ch) in chars {
- match ch {
- '"' if input.advance(byte_offset + 1).starts_with(&input.rest[..n]) => {
- let rest = input.advance(byte_offset + 1 + n);
- return Ok((rest, ()));
- }
- '\r' => {}
- _ => {}
- }
- }
- Err(LexError)
-}
-
-named!(byte -> (), do_parse!(
- punct!("b") >>
- tag!("'") >>
- cooked_byte >>
- tag!("'") >>
- (())
-));
-
-fn cooked_byte(input: Cursor) -> PResult<()> {
- let mut bytes = input.bytes().enumerate();
- let ok = match bytes.next().map(|(_, b)| b) {
- Some(b'\\') => match bytes.next().map(|(_, b)| b) {
- Some(b'x') => backslash_x_byte(&mut bytes),
- Some(b'n') | Some(b'r') | Some(b't') | Some(b'\\') | Some(b'0') | Some(b'\'')
- | Some(b'"') => true,
- _ => false,
- },
- b => b.is_some(),
- };
- if ok {
- match bytes.next() {
- Some((offset, _)) => {
- if input.chars().as_str().is_char_boundary(offset) {
- Ok((input.advance(offset), ()))
- } else {
- Err(LexError)
- }
- }
- None => Ok((input.advance(input.len()), ())),
- }
- } else {
- Err(LexError)
- }
-}
-
-named!(character -> (), do_parse!(
- punct!("'") >>
- cooked_char >>
- tag!("'") >>
- (())
-));
-
-fn cooked_char(input: Cursor) -> PResult<()> {
- let mut chars = input.char_indices();
- let ok = match chars.next().map(|(_, ch)| ch) {
- Some('\\') => match chars.next().map(|(_, ch)| ch) {
- Some('x') => backslash_x_char(&mut chars),
- Some('u') => backslash_u(&mut chars),
- Some('n') | Some('r') | Some('t') | Some('\\') | Some('0') | Some('\'') | Some('"') => {
- true
- }
- _ => false,
- },
- ch => ch.is_some(),
- };
- if ok {
- match chars.next() {
- Some((idx, _)) => Ok((input.advance(idx), ())),
- None => Ok((input.advance(input.len()), ())),
- }
- } else {
- Err(LexError)
- }
-}
-
-macro_rules! next_ch {
- ($chars:ident @ $pat:pat $(| $rest:pat)*) => {
- match $chars.next() {
- Some((_, ch)) => match ch {
- $pat $(| $rest)* => ch,
- _ => return false,
- },
- None => return false
- }
- };
-}
-
-fn backslash_x_char<I>(chars: &mut I) -> bool
-where
- I: Iterator<Item = (usize, char)>,
-{
- next_ch!(chars @ '0'...'7');
- next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F');
- true
-}
-
-fn backslash_x_byte<I>(chars: &mut I) -> bool
-where
- I: Iterator<Item = (usize, u8)>,
-{
- next_ch!(chars @ b'0'...b'9' | b'a'...b'f' | b'A'...b'F');
- next_ch!(chars @ b'0'...b'9' | b'a'...b'f' | b'A'...b'F');
- true
-}
-
-fn backslash_u<I>(chars: &mut I) -> bool
-where
- I: Iterator<Item = (usize, char)>,
-{
- next_ch!(chars @ '{');
- next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F');
- loop {
- let c = next_ch!(chars @ '0'...'9' | 'a'...'f' | 'A'...'F' | '_' | '}');
- if c == '}' {
- return true;
- }
- }
-}
-
-fn float(input: Cursor) -> PResult<()> {
- let (rest, ()) = float_digits(input)?;
- for suffix in &["f32", "f64"] {
- if rest.starts_with(suffix) {
- return word_break(rest.advance(suffix.len()));
- }
- }
- word_break(rest)
-}
-
-fn float_digits(input: Cursor) -> PResult<()> {
- let mut chars = input.chars().peekable();
- match chars.next() {
- Some(ch) if ch >= '0' && ch <= '9' => {}
- _ => return Err(LexError),
- }
-
- let mut len = 1;
- let mut has_dot = false;
- let mut has_exp = false;
- while let Some(&ch) = chars.peek() {
- match ch {
- '0'...'9' | '_' => {
- chars.next();
- len += 1;
- }
- '.' => {
- if has_dot {
- break;
- }
- chars.next();
- if chars
- .peek()
- .map(|&ch| ch == '.' || UnicodeXID::is_xid_start(ch))
- .unwrap_or(false)
- {
- return Err(LexError);
- }
- len += 1;
- has_dot = true;
- }
- 'e' | 'E' => {
- chars.next();
- len += 1;
- has_exp = true;
- break;
- }
- _ => break,
- }
- }
-
- let rest = input.advance(len);
- if !(has_dot || has_exp || rest.starts_with("f32") || rest.starts_with("f64")) {
- return Err(LexError);
- }
-
- if has_exp {
- let mut has_exp_value = false;
- while let Some(&ch) = chars.peek() {
- match ch {
- '+' | '-' => {
- if has_exp_value {
- break;
- }
- chars.next();
- len += 1;
- }
- '0'...'9' => {
- chars.next();
- len += 1;
- has_exp_value = true;
- }
- '_' => {
- chars.next();
- len += 1;
- }
- _ => break,
- }
- }
- if !has_exp_value {
- return Err(LexError);
- }
- }
-
- Ok((input.advance(len), ()))
-}
-
-fn int(input: Cursor) -> PResult<()> {
- let (rest, ()) = digits(input)?;
- for suffix in &[
- "isize", "i8", "i16", "i32", "i64", "i128", "usize", "u8", "u16", "u32", "u64", "u128"
- ] {
- if rest.starts_with(suffix) {
- return word_break(rest.advance(suffix.len()));
- }
- }
- word_break(rest)
-}
-
-fn digits(mut input: Cursor) -> PResult<()> {
- let base = if input.starts_with("0x") {
- input = input.advance(2);
- 16
- } else if input.starts_with("0o") {
- input = input.advance(2);
- 8
- } else if input.starts_with("0b") {
- input = input.advance(2);
- 2
- } else {
- 10
- };
-
- let mut len = 0;
- let mut empty = true;
- for b in input.bytes() {
- let digit = match b {
- b'0'...b'9' => (b - b'0') as u64,
- b'a'...b'f' => 10 + (b - b'a') as u64,
- b'A'...b'F' => 10 + (b - b'A') as u64,
- b'_' => {
- if empty && base == 10 {
- return Err(LexError);
- }
- len += 1;
- continue;
- }
- _ => break,
- };
- if digit >= base {
- return Err(LexError);
- }
- len += 1;
- empty = false;
- }
- if empty {
- Err(LexError)
- } else {
- Ok((input.advance(len), ()))
- }
-}
-
-fn op(input: Cursor) -> PResult<Op> {
- let input = skip_whitespace(input);
- match op_char(input) {
- Ok((rest, ch)) => {
- let kind = match op_char(rest) {
- Ok(_) => Spacing::Joint,
- Err(LexError) => Spacing::Alone,
- };
- Ok((rest, Op::new(ch, kind)))
- }
- Err(LexError) => Err(LexError),
- }
-}
-
-fn op_char(input: Cursor) -> PResult<char> {
- let mut chars = input.chars();
- let first = match chars.next() {
- Some(ch) => ch,
- None => {
- return Err(LexError);
- }
- };
- let recognized = "~!@#$%^&*-=+|;:,<.>/?";
- if recognized.contains(first) {
- Ok((input.advance(first.len_utf8()), first))
- } else {
- Err(LexError)
- }
-}
-
-fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
- let mut trees = Vec::new();
- let (rest, ((comment, inner), span)) = spanned(input, doc_comment_contents)?;
- trees.push(TokenTree::Op(Op::new('#', Spacing::Alone)));
- if inner {
- trees.push(Op::new('!', Spacing::Alone).into());
- }
- let mut stream = vec![
- TokenTree::Term(::Term::new("doc", span)),
- TokenTree::Op(Op::new('=', Spacing::Alone)),
- TokenTree::Literal(::Literal::string(comment)),
- ];
- for tt in stream.iter_mut() {
- tt.set_span(span);
- }
- trees.push(Group::new(Delimiter::Bracket, stream.into_iter().collect()).into());
- for tt in trees.iter_mut() {
- tt.set_span(span);
- }
- Ok((rest, trees))
-}
-
-named!(doc_comment_contents -> (&str, bool), alt!(
- do_parse!(
- punct!("//!") >>
- s: take_until_newline_or_eof!() >>
- ((s, true))
- )
- |
- do_parse!(
- option!(whitespace) >>
- peek!(tag!("/*!")) >>
- s: block_comment >>
- ((s, true))
- )
- |
- do_parse!(
- punct!("///") >>
- not!(tag!("/")) >>
- s: take_until_newline_or_eof!() >>
- ((s, false))
- )
- |
- do_parse!(
- option!(whitespace) >>
- peek!(tuple!(tag!("/**"), not!(tag!("*")))) >>
- s: block_comment >>
- ((s, false))
- )
-));
deleted file mode 100644
--- a/third_party/rust/proc-macro2-0.3.6/tests/test.rs
+++ /dev/null
@@ -1,304 +0,0 @@
-extern crate proc_macro2;
-
-use std::str::{self, FromStr};
-
-use proc_macro2::{Literal, Span, Term, TokenStream, TokenTree};
-
-#[test]
-fn terms() {
- assert_eq!(Term::new("String", Span::call_site()).as_str(), "String");
- assert_eq!(Term::new("fn", Span::call_site()).as_str(), "fn");
- assert_eq!(Term::new("_", Span::call_site()).as_str(), "_");
-}
-
-#[test]
-fn raw_terms() {
- assert_eq!(Term::new("r#String", Span::call_site()).as_str(), "r#String");
- assert_eq!(Term::new("r#fn", Span::call_site()).as_str(), "r#fn");
- assert_eq!(Term::new("r#_", Span::call_site()).as_str(), "r#_");
-}
-
-#[test]
-fn lifetimes() {
- assert_eq!(Term::new("'a", Span::call_site()).as_str(), "'a");
- assert_eq!(Term::new("'static", Span::call_site()).as_str(), "'static");
- assert_eq!(Term::new("'_", Span::call_site()).as_str(), "'_");
-}
-
-#[test]
-#[should_panic(expected = "Term is not allowed to be empty; use Option<Term>")]
-fn term_empty() {
- Term::new("", Span::call_site());
-}
-
-#[test]
-#[should_panic(expected = "Term cannot be a number; use Literal instead")]
-fn term_number() {
- Term::new("255", Span::call_site());
-}
-
-#[test]
-#[should_panic(expected = "\"a#\" is not a valid Term")]
-fn term_invalid() {
- Term::new("a#", Span::call_site());
-}
-
-#[test]
-#[should_panic(expected = "Term is not allowed to be empty; use Option<Term>")]
-fn raw_term_empty() {
- Term::new("r#", Span::call_site());
-}
-
-#[test]
-#[should_panic(expected = "Term cannot be a number; use Literal instead")]
-fn raw_term_number() {
- Term::new("r#255", Span::call_site());
-}
-
-#[test]
-#[should_panic(expected = "\"r#a#\" is not a valid Term")]
-fn raw_term_invalid() {
- Term::new("r#a#", Span::call_site());
-}
-
-#[test]
-#[should_panic(expected = "Term is not allowed to be empty; use Option<Term>")]
-fn lifetime_empty() {
- Term::new("'", Span::call_site());
-}
-
-#[test]
-#[should_panic(expected = "Term cannot be a number; use Literal instead")]
-fn lifetime_number() {
- Term::new("'255", Span::call_site());
-}
-
-#[test]
-#[should_panic(expected = r#""\'a#" is not a valid Term"#)]
-fn lifetime_invalid() {
- Term::new("'a#", Span::call_site());
-}
-
-#[test]
-fn literals() {
- assert_eq!(Literal::string("foo").to_string(), "\"foo\"");
- assert_eq!(Literal::string("\"").to_string(), "\"\\\"\"");
- assert_eq!(Literal::f32_unsuffixed(10.0).to_string(), "10.0");
-}
-
-#[test]
-fn roundtrip() {
- fn roundtrip(p: &str) {
- println!("parse: {}", p);
- let s = p.parse::<TokenStream>().unwrap().to_string();
- println!("first: {}", s);
- let s2 = s.to_string().parse::<TokenStream>().unwrap().to_string();
- assert_eq!(s, s2);
- }
- roundtrip("a");
- roundtrip("<<");
- roundtrip("<<=");
- roundtrip(
- "
- 1
- 1.0
- 1f32
- 2f64
- 1usize
- 4isize
- 4e10
- 1_000
- 1_0i32
- 8u8
- 9
- 0
- 0xffffffffffffffffffffffffffffffff
- ",
- );
- roundtrip("'a");
- roundtrip("'static");
- roundtrip("'\\u{10__FFFF}'");
- roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\"");
-}
-
-#[test]
-fn fail() {
- fn fail(p: &str) {
- if p.parse::<TokenStream>().is_ok() {
- panic!("should have failed to parse: {}", p);
- }
- }
- fail("1x");
- fail("1u80");
- fail("1f320");
- fail("' static");
- fail("'mut");
- fail("r#1");
- fail("r#_");
-}
-
-#[cfg(procmacro2_semver_exempt)]
-#[test]
-fn span_test() {
- use proc_macro2::TokenTree;
-
- fn check_spans(p: &str, mut lines: &[(usize, usize, usize, usize)]) {
- let ts = p.parse::<TokenStream>().unwrap();
- check_spans_internal(ts, &mut lines);
- }
-
- fn check_spans_internal(ts: TokenStream, lines: &mut &[(usize, usize, usize, usize)]) {
- for i in ts {
- if let Some((&(sline, scol, eline, ecol), rest)) = lines.split_first() {
- *lines = rest;
-
- let start = i.span().start();
- assert_eq!(start.line, sline, "sline did not match for {}", i);
- assert_eq!(start.column, scol, "scol did not match for {}", i);
-
- let end = i.span().end();
- assert_eq!(end.line, eline, "eline did not match for {}", i);
- assert_eq!(end.column, ecol, "ecol did not match for {}", i);
-
- match i {
- TokenTree::Group(ref g) => {
- check_spans_internal(g.stream().clone(), lines);
- }
- _ => {}
- }
- }
- }
- }
-
- check_spans(
- "\
-/// This is a document comment
-testing 123
-{
- testing 234
-}",
- &[
- (1, 0, 1, 30), // #
- (1, 0, 1, 30), // [ ... ]
- (1, 0, 1, 30), // doc
- (1, 0, 1, 30), // =
- (1, 0, 1, 30), // "This is..."
- (2, 0, 2, 7), // testing
- (2, 8, 2, 11), // 123
- (3, 0, 5, 1), // { ... }
- (4, 2, 4, 9), // testing
- (4, 10, 4, 13), // 234
- ],
- );
-}
-
-#[cfg(procmacro2_semver_exempt)]
-#[cfg(not(feature = "nightly"))]
-#[test]
-fn default_span() {
- let start = Span::call_site().start();
- assert_eq!(start.line, 1);
- assert_eq!(start.column, 0);
- let end = Span::call_site().end();
- assert_eq!(end.line, 1);
- assert_eq!(end.column, 0);
- let source_file = Span::call_site().source_file();
- assert_eq!(source_file.path().to_string(), "<unspecified>");
- assert!(!source_file.is_real());
-}
-
-#[cfg(procmacro2_semver_exempt)]
-#[test]
-fn span_join() {
- let source1 = "aaa\nbbb"
- .parse::<TokenStream>()
- .unwrap()
- .into_iter()
- .collect::<Vec<_>>();
- let source2 = "ccc\nddd"
- .parse::<TokenStream>()
- .unwrap()
- .into_iter()
- .collect::<Vec<_>>();
-
- assert!(source1[0].span().source_file() != source2[0].span().source_file());
- assert_eq!(
- source1[0].span().source_file(),
- source1[1].span().source_file()
- );
-
- let joined1 = source1[0].span().join(source1[1].span());
- let joined2 = source1[0].span().join(source2[0].span());
- assert!(joined1.is_some());
- assert!(joined2.is_none());
-
- let start = joined1.unwrap().start();
- let end = joined1.unwrap().end();
- assert_eq!(start.line, 1);
- assert_eq!(start.column, 0);
- assert_eq!(end.line, 2);
- assert_eq!(end.column, 3);
-
- assert_eq!(
- joined1.unwrap().source_file(),
- source1[0].span().source_file()
- );
-}
-
-#[test]
-fn no_panic() {
- let s = str::from_utf8(b"b\'\xc2\x86 \x00\x00\x00^\"").unwrap();
- assert!(s.parse::<proc_macro2::TokenStream>().is_err());
-}
-
-#[test]
-fn tricky_doc_comment() {
- let stream = "/**/".parse::<proc_macro2::TokenStream>().unwrap();
- let tokens = stream.into_iter().collect::<Vec<_>>();
- assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
-
- let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
- let tokens = stream.into_iter().collect::<Vec<_>>();
- assert!(tokens.len() == 2, "not length 2 -- {:?}", tokens);
- match tokens[0] {
- proc_macro2::TokenTree::Op(ref tt) => assert_eq!(tt.op(), '#'),
- _ => panic!("wrong token {:?}", tokens[0]),
- }
- let mut tokens = match tokens[1] {
- proc_macro2::TokenTree::Group(ref tt) => {
- assert_eq!(tt.delimiter(), proc_macro2::Delimiter::Bracket);
- tt.stream().into_iter()
- }
- _ => panic!("wrong token {:?}", tokens[0]),
- };
-
- match tokens.next().unwrap() {
- proc_macro2::TokenTree::Term(ref tt) => assert_eq!(tt.as_str(), "doc"),
- t => panic!("wrong token {:?}", t),
- }
- match tokens.next().unwrap() {
- proc_macro2::TokenTree::Op(ref tt) => assert_eq!(tt.op(), '='),
- t => panic!("wrong token {:?}", t),
- }
- match tokens.next().unwrap() {
- proc_macro2::TokenTree::Literal(ref tt) => {
- assert_eq!(tt.to_string(), "\" doc\"");
- }
- t => panic!("wrong token {:?}", t),
- }
- assert!(tokens.next().is_none());
-
- let stream = "//! doc".parse::<proc_macro2::TokenStream>().unwrap();
- let tokens = stream.into_iter().collect::<Vec<_>>();
- assert!(tokens.len() == 3, "not length 3 -- {:?}", tokens);
-}
-
-#[test]
-fn raw_identifier() {
- let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter();
- match tts.next().unwrap() {
- TokenTree::Term(raw) => assert_eq!("r#dyn", raw.as_str()),
- wrong => panic!("wrong token {:?}", wrong),
- }
- assert!(tts.next().is_none());
-}