Bug 1468950 - Update Cargo lockfiles and re-vendor rust dependencies. r?Gankro draft
authorKartikaya Gupta <kgupta@mozilla.com>
Thu, 21 Jun 2018 08:22:02 -0400
changeset 809170 29a4c4d15dadb2bd2d7b75e758e16d67567ff083
parent 809169 168c03ba267985d6209291d534f8b3345b0d04f4
push id113563
push userkgupta@mozilla.com
push dateThu, 21 Jun 2018 12:23:50 +0000
reviewersGankro
bugs1468950
milestone62.0a1
Bug 1468950 - Update Cargo lockfiles and re-vendor rust dependencies. r?Gankro MozReview-Commit-ID: 3kcDWGBPXt9
Cargo.lock
third_party/rust/proc-macro2-0.3.6/.cargo-checksum.json
third_party/rust/proc-macro2-0.3.6/.travis.yml
third_party/rust/proc-macro2-0.3.6/Cargo.toml
third_party/rust/proc-macro2-0.3.6/LICENSE-APACHE
third_party/rust/proc-macro2-0.3.6/LICENSE-MIT
third_party/rust/proc-macro2-0.3.6/README.md
third_party/rust/proc-macro2-0.3.6/src/lib.rs
third_party/rust/proc-macro2-0.3.6/src/stable.rs
third_party/rust/proc-macro2-0.3.6/src/strnom.rs
third_party/rust/proc-macro2-0.3.6/src/unstable.rs
third_party/rust/proc-macro2-0.3.6/tests/test.rs
third_party/rust/proc-macro2/.cargo-checksum.json
third_party/rust/proc-macro2/.travis.yml
third_party/rust/proc-macro2/Cargo.toml
third_party/rust/proc-macro2/README.md
third_party/rust/proc-macro2/src/lib.rs
third_party/rust/proc-macro2/src/stable.rs
third_party/rust/proc-macro2/src/strnom.rs
third_party/rust/proc-macro2/src/unstable.rs
third_party/rust/proc-macro2/tests/test.rs
third_party/rust/quote-0.5.2/.cargo-checksum.json
third_party/rust/quote-0.5.2/Cargo.toml
third_party/rust/quote-0.5.2/LICENSE-APACHE
third_party/rust/quote-0.5.2/LICENSE-MIT
third_party/rust/quote-0.5.2/README.md
third_party/rust/quote-0.5.2/src/lib.rs
third_party/rust/quote-0.5.2/src/to_tokens.rs
third_party/rust/quote-0.5.2/src/tokens.rs
third_party/rust/quote-0.5.2/tests/test.rs
third_party/rust/quote/.cargo-checksum.json
third_party/rust/quote/Cargo.toml
third_party/rust/quote/README.md
third_party/rust/quote/src/ext.rs
third_party/rust/quote/src/lib.rs
third_party/rust/quote/src/to_tokens.rs
third_party/rust/quote/src/tokens.rs
third_party/rust/quote/tests/test.rs
third_party/rust/serde/.cargo-checksum.json
third_party/rust/serde/Cargo.toml
third_party/rust/serde/README.md
third_party/rust/serde/build.rs
third_party/rust/serde/crates-io.md
third_party/rust/serde/src/de/from_primitive.rs
third_party/rust/serde/src/de/impls.rs
third_party/rust/serde/src/de/mod.rs
third_party/rust/serde/src/de/value.rs
third_party/rust/serde/src/integer128.rs
third_party/rust/serde/src/lib.rs
third_party/rust/serde/src/macros.rs
third_party/rust/serde/src/private/de.rs
third_party/rust/serde/src/private/ser.rs
third_party/rust/serde/src/ser/impls.rs
third_party/rust/serde/src/ser/mod.rs
third_party/rust/serde_derive/.cargo-checksum.json
third_party/rust/serde_derive/Cargo.toml
third_party/rust/serde_derive/README.md
third_party/rust/serde_derive/crates-io.md
third_party/rust/serde_derive/src/bound.rs
third_party/rust/serde_derive/src/de.rs
third_party/rust/serde_derive/src/fragment.rs
third_party/rust/serde_derive/src/internals/ast.rs
third_party/rust/serde_derive/src/internals/attr.rs
third_party/rust/serde_derive/src/internals/check.rs
third_party/rust/serde_derive/src/internals/mod.rs
third_party/rust/serde_derive/src/lib.rs
third_party/rust/serde_derive/src/pretend.rs
third_party/rust/serde_derive/src/ser.rs
third_party/rust/serde_derive/src/try.rs
third_party/rust/syn-0.13.1/.cargo-checksum.json
third_party/rust/syn-0.13.1/Cargo.toml
third_party/rust/syn-0.13.1/LICENSE-APACHE
third_party/rust/syn-0.13.1/LICENSE-MIT
third_party/rust/syn-0.13.1/README.md
third_party/rust/syn-0.13.1/src/attr.rs
third_party/rust/syn-0.13.1/src/buffer.rs
third_party/rust/syn-0.13.1/src/data.rs
third_party/rust/syn-0.13.1/src/derive.rs
third_party/rust/syn-0.13.1/src/error.rs
third_party/rust/syn-0.13.1/src/expr.rs
third_party/rust/syn-0.13.1/src/file.rs
third_party/rust/syn-0.13.1/src/gen/fold.rs
third_party/rust/syn-0.13.1/src/gen/visit.rs
third_party/rust/syn-0.13.1/src/gen/visit_mut.rs
third_party/rust/syn-0.13.1/src/gen_helper.rs
third_party/rust/syn-0.13.1/src/generics.rs
third_party/rust/syn-0.13.1/src/ident.rs
third_party/rust/syn-0.13.1/src/item.rs
third_party/rust/syn-0.13.1/src/lib.rs
third_party/rust/syn-0.13.1/src/lifetime.rs
third_party/rust/syn-0.13.1/src/lit.rs
third_party/rust/syn-0.13.1/src/mac.rs
third_party/rust/syn-0.13.1/src/macros.rs
third_party/rust/syn-0.13.1/src/op.rs
third_party/rust/syn-0.13.1/src/parse_quote.rs
third_party/rust/syn-0.13.1/src/parsers.rs
third_party/rust/syn-0.13.1/src/path.rs
third_party/rust/syn-0.13.1/src/punctuated.rs
third_party/rust/syn-0.13.1/src/spanned.rs
third_party/rust/syn-0.13.1/src/synom.rs
third_party/rust/syn-0.13.1/src/token.rs
third_party/rust/syn-0.13.1/src/tt.rs
third_party/rust/syn-0.13.1/src/ty.rs
third_party/rust/syn/.cargo-checksum.json
third_party/rust/syn/Cargo.toml
third_party/rust/syn/README.md
third_party/rust/syn/src/attr.rs
third_party/rust/syn/src/buffer.rs
third_party/rust/syn/src/data.rs
third_party/rust/syn/src/derive.rs
third_party/rust/syn/src/error.rs
third_party/rust/syn/src/expr.rs
third_party/rust/syn/src/file.rs
third_party/rust/syn/src/gen/fold.rs
third_party/rust/syn/src/gen/visit.rs
third_party/rust/syn/src/gen/visit_mut.rs
third_party/rust/syn/src/gen_helper.rs
third_party/rust/syn/src/generics.rs
third_party/rust/syn/src/ident.rs
third_party/rust/syn/src/item.rs
third_party/rust/syn/src/lib.rs
third_party/rust/syn/src/lifetime.rs
third_party/rust/syn/src/lit.rs
third_party/rust/syn/src/mac.rs
third_party/rust/syn/src/macros.rs
third_party/rust/syn/src/op.rs
third_party/rust/syn/src/parse_quote.rs
third_party/rust/syn/src/parsers.rs
third_party/rust/syn/src/path.rs
third_party/rust/syn/src/punctuated.rs
third_party/rust/syn/src/spanned.rs
third_party/rust/syn/src/synom.rs
third_party/rust/syn/src/token.rs
third_party/rust/syn/src/tt.rs
third_party/rust/syn/src/ty.rs
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -29,17 +29,17 @@ dependencies = [
 ]
 
 [[package]]
 name = "app_units"
 version = "0.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "num-traits 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.58 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "arrayvec"
 version = "0.4.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "nodrop 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -87,18 +87,18 @@ dependencies = [
  "cubeb 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "error-chain 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "futures 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)",
  "iovec 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.39 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
  "memmap 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "scoped-tls 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.58 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_derive 1.0.58 (git+https://github.com/servo/serde?branch=deserialize_from_enums7)",
+ "serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.66 (git+https://github.com/servo/serde?branch=deserialize_from_enums8)",
  "tokio-core 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "tokio-io 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "tokio-uds 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "audioipc-client"
 version = "0.4.0"
@@ -146,17 +146,17 @@ version = "0.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "bincode"
 version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "byteorder 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.58 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "bindgen"
 version = "0.33.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "cexpr 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -578,18 +578,18 @@ source = "registry+https://github.com/ru
 
 [[package]]
 name = "docopt"
 version = "0.8.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "lazy_static 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.58 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_derive 1.0.58 (git+https://github.com/servo/serde?branch=deserialize_from_enums7)",
+ "serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.66 (git+https://github.com/servo/serde?branch=deserialize_from_enums8)",
  "strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "dtoa"
 version = "0.4.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
@@ -605,18 +605,18 @@ dependencies = [
 name = "dwrote"
 version = "0.4.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "gdi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "lazy_static 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.39 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.58 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_derive 1.0.58 (git+https://github.com/servo/serde?branch=deserialize_from_enums7)",
+ "serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.66 (git+https://github.com/servo/serde?branch=deserialize_from_enums8)",
  "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "either"
 version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
@@ -669,17 +669,17 @@ version = "0.11.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "euclid"
 version = "0.17.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "num-traits 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.58 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "fallible"
 version = "0.0.1"
 dependencies = [
  "hashglobe 0.1.0",
  "smallvec 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1013,18 +1013,18 @@ dependencies = [
  "docopt 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "ena 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "itertools 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "lalrpop-snap 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "lalrpop-util 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "petgraph 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
  "regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.58 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_derive 1.0.58 (git+https://github.com/servo/serde?branch=deserialize_from_enums7)",
+ "serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.66 (git+https://github.com/servo/serde?branch=deserialize_from_enums8)",
  "string_cache 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "lalrpop-intern"
 version = "0.15.1"
@@ -1593,16 +1593,24 @@ version = "0.0.1"
 name = "proc-macro2"
 version = "0.3.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
+name = "proc-macro2"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
 name = "procedural-masquerade"
 version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "pulse"
 version = "0.2.0"
 dependencies = [
@@ -1631,16 +1639,24 @@ source = "registry+https://github.com/ru
 name = "quote"
 version = "0.5.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "proc-macro2 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
+name = "quote"
+version = "0.6.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "proc-macro2 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
 name = "rand"
 version = "0.3.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "fuchsia-zircon 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.39 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
@@ -1707,17 +1723,17 @@ dependencies = [
  "ucd-util 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "ron"
 version = "0.1.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "serde 1.0.58 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "rsdparsa"
 version = "0.1.0"
 
 [[package]]
 name = "rsdparsa_capi"
@@ -1793,38 +1809,38 @@ dependencies = [
 
 [[package]]
 name = "semver-parser"
 version = "0.7.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "serde"
-version = "1.0.58"
+version = "1.0.66"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "serde_derive 1.0.58 (git+https://github.com/servo/serde?branch=deserialize_from_enums7)",
+ "serde_derive 1.0.66 (git+https://github.com/servo/serde?branch=deserialize_from_enums8)",
 ]
 
 [[package]]
 name = "serde_bytes"
 version = "0.10.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "serde 1.0.58 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "serde_derive"
-version = "1.0.58"
-source = "git+https://github.com/servo/serde?branch=deserialize_from_enums7#884e8078a9c74314fa4a5a2e6ce4ac67ab8fa415"
+version = "1.0.66"
+source = "git+https://github.com/servo/serde?branch=deserialize_from_enums8#c4457d804b38b14e699b45c01d1909f93f25ab5e"
 dependencies = [
- "proc-macro2 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "proc-macro2 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.14.2 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "servo_arc"
 version = "0.1.1"
 dependencies = [
  "nodrop 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
  "stable_deref_trait 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1868,17 +1884,17 @@ source = "registry+https://github.com/ru
 name = "string_cache"
 version = "0.7.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "lazy_static 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "new_debug_unreachable 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
  "precomputed-hash 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.58 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
  "string_cache_codegen 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "string_cache_shared 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "string_cache_codegen"
 version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2005,16 +2021,26 @@ version = "0.13.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "proc-macro2 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "quote 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
+name = "syn"
+version = "0.14.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "proc-macro2 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
 name = "synstructure"
 version = "0.8.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "proc-macro2 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "quote 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "syn 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2129,17 +2155,17 @@ dependencies = [
  "tokio-io 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "toml"
 version = "0.4.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "serde 1.0.58 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "traitobject"
 version = "0.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
@@ -2315,17 +2341,17 @@ dependencies = [
  "fxhash 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "gleam 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "lazy_static 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "num-traits 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)",
  "plane-split 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "rayon 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "ron 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.58 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
  "smallvec 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "thread_profiler 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "time 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)",
  "webrender_api 0.57.2",
 ]
 
 [[package]]
 name = "webrender_api"
@@ -2334,19 +2360,19 @@ dependencies = [
  "app_units 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "bincode 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "byteorder 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "core-foundation 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "core-graphics 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "dwrote 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "euclid 0.17.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde 1.0.58 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde_bytes 0.10.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "serde_derive 1.0.58 (git+https://github.com/servo/serde?branch=deserialize_from_enums7)",
+ "serde_derive 1.0.66 (git+https://github.com/servo/serde?branch=deserialize_from_enums8)",
  "time 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "webrender_bindings"
 version = "0.1.0"
 dependencies = [
  "app_units 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2634,20 +2660,22 @@ dependencies = [
 "checksum phf_codegen 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "d62594c0bb54c464f633175d502038177e90309daf2e0158be42ed5f023ce88f"
 "checksum phf_generator 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "6b07ffcc532ccc85e3afc45865469bf5d9e4ef5bfcf9622e3cfe80c2d275ec03"
 "checksum phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "07e24b0ca9643bdecd0632f2b3da6b1b89bbb0030e0b992afc1113b23a7bc2f2"
 "checksum pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "3a8b4c6b8165cd1a1cd4b9b120978131389f64bdaf456435caa41e630edba903"
 "checksum plane-split 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7079b8485b4f9d9560dee7a69ca8f6ca781f9f284ff9d2bf27255d440b03e4af"
 "checksum podio 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e5422a1ee1bc57cc47ae717b0137314258138f38fd5f3cea083f43a9725383a0"
 "checksum precomputed-hash 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
 "checksum proc-macro2 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "49b6a521dc81b643e9a51e0d1cf05df46d5a2f3c0280ea72bcb68276ba64a118"
+"checksum proc-macro2 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "effdb53b25cdad54f8f48843d67398f7ef2e14f12c1b4cb4effc549a6462a4d6"
 "checksum procedural-masquerade 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9f566249236c6ca4340f7ca78968271f0ed2b0f234007a61b66f9ecd0af09260"
 "checksum quick-error 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "eda5fe9b71976e62bc81b781206aaa076401769b2143379d3eb2118388babac4"
 "checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a"
 "checksum quote 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9949cfe66888ffe1d53e6ec9d9f3b70714083854be20fd5e271b232a017401e8"
+"checksum quote 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "e44651a0dc4cdd99f71c83b561e221f714912d11af1a4dff0631f923d53af035"
 "checksum rand 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)" = "6475140dfd8655aeb72e1fd4b7a1cc1c202be65d71669476e392fe62532b9edd"
 "checksum rayon 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "485541959c8ecc49865526fe6c4de9653dd6e60d829d6edf0be228167b60372d"
 "checksum rayon-core 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9d24ad214285a7729b174ed6d3bcfcb80177807f959d95fafd5bfc5c4f201ac8"
 "checksum redox_syscall 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "ab105df655884ede59d45b7070c8a65002d921461ee813a024558ca16030eea0"
 "checksum regex 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1731164734096285ec2a5ec7fea5248ae2f5485b3feeb0115af4fda2183b2d1b"
 "checksum regex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "75ecf88252dce580404a22444fc7d626c01815debba56a7f4f536772a5ff19d3"
 "checksum regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ad890a5eef7953f55427c50575c680c42841653abd2b028b68cd223d157f62db"
 "checksum regex-syntax 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8f1ac0f60d675cc6cf13a20ec076568254472551051ad5dd050364d70671bf6b"
@@ -2656,31 +2684,32 @@ dependencies = [
 "checksum rust-ini 0.10.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8a654c5bda722c699be6b0fe4c0d90de218928da5b724c3e467fc48865c37263"
 "checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda"
 "checksum safemem 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e27a8b19b835f7aea908818e871f5cc3a5a186550c30773be987e155e8163d8f"
 "checksum same-file 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "cfb6eded0b06a0b512c8ddbcf04089138c9b4362c2f696f3c3d76039d68f3637"
 "checksum scoped-tls 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f417c22df063e9450888a7561788e9bd46d3bb3c1466435b4eccb903807f147d"
 "checksum scopeguard 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c79eb2c3ac4bc2507cda80e7f3ac5b88bd8eae4c0914d5663e6a8933994be918"
 "checksum semver 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a3186ec9e65071a2095434b1f5bb24838d4e8e130f584c790f6033c79943537"
 "checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
-"checksum serde 1.0.58 (registry+https://github.com/rust-lang/crates.io-index)" = "34e9df8efbe7a2c12ceec1fc8744d56ae3374d8ae325f4a0028949d16433d554"
+"checksum serde 1.0.66 (registry+https://github.com/rust-lang/crates.io-index)" = "e9a2d9a9ac5120e0f768801ca2b58ad6eec929dc9d1d616c162f208869c2ce95"
 "checksum serde_bytes 0.10.4 (registry+https://github.com/rust-lang/crates.io-index)" = "adb6e51a6b3696b301bc221d785f898b4457c619b51d7ce195a6d20baecb37b3"
-"checksum serde_derive 1.0.58 (git+https://github.com/servo/serde?branch=deserialize_from_enums7)" = "<none>"
+"checksum serde_derive 1.0.66 (git+https://github.com/servo/serde?branch=deserialize_from_enums8)" = "<none>"
 "checksum simd 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "ed3686dd9418ebcc3a26a0c0ae56deab0681e53fe899af91f5bbcee667ebffb1"
 "checksum siphasher 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2ffc669b726f2bc9a3bcff66e5e23b56ba6bf70e22a34c3d7b6d0b3450b65b84"
 "checksum slab 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "17b4fcaed89ab08ef143da37bc52adbcc04d4a69014f4c1208d6b51f0c47bc23"
 "checksum smallbitvec 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5c63726029f0069f88467873e47f392575f28f9f16b72ac65465263db4b3a13c"
 "checksum smallvec 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "44db0ecb22921ef790d17ae13a3f6d15784183ff5f2a01aa32098c7498d2b4b9"
 "checksum stable_deref_trait 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "15132e0e364248108c5e2c02e3ab539be8d6f5d52a01ca9bbf27ed657316f02b"
 "checksum string_cache 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "25d70109977172b127fe834e5449e5ab1740b9ba49fa18a2020f509174f25423"
 "checksum string_cache_codegen 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "479cde50c3539481f33906a387f2bd17c8e87cb848c35b6021d41fb81ff9b4d7"
 "checksum string_cache_shared 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b1884d1bc09741d466d9b14e6d37ac89d6909cbcac41dd9ae982d4d063bbedfc"
 "checksum strsim 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b4d15c810519a91cf877e7e36e63fe068815c678181439f2f29e2562147c3694"
 "checksum strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bb4f380125926a99e52bc279241539c018323fab05ad6368b56f93d9369ff550"
 "checksum syn 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)" = "91b52877572087400e83d24b9178488541e3d535259e04ff17a63df1e5ceff59"
+"checksum syn 0.14.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c67da57e61ebc7b7b6fff56bb34440ca3a83db037320b0507af4c10368deda7d"
 "checksum synstructure 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "98cad891cd238c98e1f0aec9f7c0f620aa696e4e5f7daba56ac67b5e86a6b049"
 "checksum tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "87974a6f5c1dfb344d733055601650059a3363de2a6104819293baff662132d6"
 "checksum term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "fa63644f74ce96fbeb9b794f66aff2a52d601cbd5e80f4b97123e3899f4570f1"
 "checksum term_size 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2b6b55df3198cc93372e85dd2ed817f0e38ce8cc0f22eb32391bfad9c4bf209"
 "checksum termcolor 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "adc4587ead41bf016f11af03e55a624c06568b5a19db4e90fde573d805074f83"
 "checksum textwrap 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c0b59b6b4b44d867f1370ef1bd91bfb262bf07bf0ae65c202ea2fbc16153b693"
 "checksum thread_local 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "279ef31c19ededf577bfd12dfae728040a21f635b06a24cd670ff510edd38963"
 "checksum thread_profiler 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cf947d192a9be60ef5131cc7a4648886ba89d712f16700ebbf80c8a69d05d48f"
copy from third_party/rust/proc-macro2/.cargo-checksum.json
copy to third_party/rust/proc-macro2-0.3.6/.cargo-checksum.json
copy from third_party/rust/proc-macro2/.travis.yml
copy to third_party/rust/proc-macro2-0.3.6/.travis.yml
copy from third_party/rust/proc-macro2/Cargo.toml
copy to third_party/rust/proc-macro2-0.3.6/Cargo.toml
new file mode 100644
--- /dev/null
+++ b/third_party/rust/proc-macro2-0.3.6/LICENSE-APACHE
@@ -0,0 +1,201 @@
+                              Apache License
+                        Version 2.0, January 2004
+                     http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+   "License" shall mean the terms and conditions for use, reproduction,
+   and distribution as defined by Sections 1 through 9 of this document.
+
+   "Licensor" shall mean the copyright owner or entity authorized by
+   the copyright owner that is granting the License.
+
+   "Legal Entity" shall mean the union of the acting entity and all
+   other entities that control, are controlled by, or are under common
+   control with that entity. For the purposes of this definition,
+   "control" means (i) the power, direct or indirect, to cause the
+   direction or management of such entity, whether by contract or
+   otherwise, or (ii) ownership of fifty percent (50%) or more of the
+   outstanding shares, or (iii) beneficial ownership of such entity.
+
+   "You" (or "Your") shall mean an individual or Legal Entity
+   exercising permissions granted by this License.
+
+   "Source" form shall mean the preferred form for making modifications,
+   including but not limited to software source code, documentation
+   source, and configuration files.
+
+   "Object" form shall mean any form resulting from mechanical
+   transformation or translation of a Source form, including but
+   not limited to compiled object code, generated documentation,
+   and conversions to other media types.
+
+   "Work" shall mean the work of authorship, whether in Source or
+   Object form, made available under the License, as indicated by a
+   copyright notice that is included in or attached to the work
+   (an example is provided in the Appendix below).
+
+   "Derivative Works" shall mean any work, whether in Source or Object
+   form, that is based on (or derived from) the Work and for which the
+   editorial revisions, annotations, elaborations, or other modifications
+   represent, as a whole, an original work of authorship. For the purposes
+   of this License, Derivative Works shall not include works that remain
+   separable from, or merely link (or bind by name) to the interfaces of,
+   the Work and Derivative Works thereof.
+
+   "Contribution" shall mean any work of authorship, including
+   the original version of the Work and any modifications or additions
+   to that Work or Derivative Works thereof, that is intentionally
+   submitted to Licensor for inclusion in the Work by the copyright owner
+   or by an individual or Legal Entity authorized to submit on behalf of
+   the copyright owner. For the purposes of this definition, "submitted"
+   means any form of electronic, verbal, or written communication sent
+   to the Licensor or its representatives, including but not limited to
+   communication on electronic mailing lists, source code control systems,
+   and issue tracking systems that are managed by, or on behalf of, the
+   Licensor for the purpose of discussing and improving the Work, but
+   excluding communication that is conspicuously marked or otherwise
+   designated in writing by the copyright owner as "Not a Contribution."
+
+   "Contributor" shall mean Licensor and any individual or Legal Entity
+   on behalf of whom a Contribution has been received by Licensor and
+   subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   copyright license to reproduce, prepare Derivative Works of,
+   publicly display, publicly perform, sublicense, and distribute the
+   Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   (except as stated in this section) patent license to make, have made,
+   use, offer to sell, sell, import, and otherwise transfer the Work,
+   where such license applies only to those patent claims licensable
+   by such Contributor that are necessarily infringed by their
+   Contribution(s) alone or by combination of their Contribution(s)
+   with the Work to which such Contribution(s) was submitted. If You
+   institute patent litigation against any entity (including a
+   cross-claim or counterclaim in a lawsuit) alleging that the Work
+   or a Contribution incorporated within the Work constitutes direct
+   or contributory patent infringement, then any patent licenses
+   granted to You under this License for that Work shall terminate
+   as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+   Work or Derivative Works thereof in any medium, with or without
+   modifications, and in Source or Object form, provided that You
+   meet the following conditions:
+
+   (a) You must give any other recipients of the Work or
+       Derivative Works a copy of this License; and
+
+   (b) You must cause any modified files to carry prominent notices
+       stating that You changed the files; and
+
+   (c) You must retain, in the Source form of any Derivative Works
+       that You distribute, all copyright, patent, trademark, and
+       attribution notices from the Source form of the Work,
+       excluding those notices that do not pertain to any part of
+       the Derivative Works; and
+
+   (d) If the Work includes a "NOTICE" text file as part of its
+       distribution, then any Derivative Works that You distribute must
+       include a readable copy of the attribution notices contained
+       within such NOTICE file, excluding those notices that do not
+       pertain to any part of the Derivative Works, in at least one
+       of the following places: within a NOTICE text file distributed
+       as part of the Derivative Works; within the Source form or
+       documentation, if provided along with the Derivative Works; or,
+       within a display generated by the Derivative Works, if and
+       wherever such third-party notices normally appear. The contents
+       of the NOTICE file are for informational purposes only and
+       do not modify the License. You may add Your own attribution
+       notices within Derivative Works that You distribute, alongside
+       or as an addendum to the NOTICE text from the Work, provided
+       that such additional attribution notices cannot be construed
+       as modifying the License.
+
+   You may add Your own copyright statement to Your modifications and
+   may provide additional or different license terms and conditions
+   for use, reproduction, or distribution of Your modifications, or
+   for any such Derivative Works as a whole, provided Your use,
+   reproduction, and distribution of the Work otherwise complies with
+   the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+   any Contribution intentionally submitted for inclusion in the Work
+   by You to the Licensor shall be under the terms and conditions of
+   this License, without any additional terms or conditions.
+   Notwithstanding the above, nothing herein shall supersede or modify
+   the terms of any separate license agreement you may have executed
+   with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+   names, trademarks, service marks, or product names of the Licensor,
+   except as required for reasonable and customary use in describing the
+   origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+   agreed to in writing, Licensor provides the Work (and each
+   Contributor provides its Contributions) on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+   implied, including, without limitation, any warranties or conditions
+   of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+   PARTICULAR PURPOSE. You are solely responsible for determining the
+   appropriateness of using or redistributing the Work and assume any
+   risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+   whether in tort (including negligence), contract, or otherwise,
+   unless required by applicable law (such as deliberate and grossly
+   negligent acts) or agreed to in writing, shall any Contributor be
+   liable to You for damages, including any direct, indirect, special,
+   incidental, or consequential damages of any character arising as a
+   result of this License or out of the use or inability to use the
+   Work (including but not limited to damages for loss of goodwill,
+   work stoppage, computer failure or malfunction, or any and all
+   other commercial damages or losses), even if such Contributor
+   has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+   the Work or Derivative Works thereof, You may choose to offer,
+   and charge a fee for, acceptance of support, warranty, indemnity,
+   or other liability obligations and/or rights consistent with this
+   License. However, in accepting such obligations, You may act only
+   on Your own behalf and on Your sole responsibility, not on behalf
+   of any other Contributor, and only if You agree to indemnify,
+   defend, and hold each Contributor harmless for any liability
+   incurred by, or claims asserted against, such Contributor by reason
+   of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+   To apply the Apache License to your work, attach the following
+   boilerplate notice, with the fields enclosed by brackets "[]"
+   replaced with your own identifying information. (Don't include
+   the brackets!)  The text should be enclosed in the appropriate
+   comment syntax for the file format. We also recommend that a
+   file or class name and description of purpose be included on the
+   same "printed page" as the copyright notice for easier
+   identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+	http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
new file mode 100644
--- /dev/null
+++ b/third_party/rust/proc-macro2-0.3.6/LICENSE-MIT
@@ -0,0 +1,25 @@
+Copyright (c) 2014 Alex Crichton
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
copy from third_party/rust/proc-macro2/README.md
copy to third_party/rust/proc-macro2-0.3.6/README.md
copy from third_party/rust/proc-macro2/src/lib.rs
copy to third_party/rust/proc-macro2-0.3.6/src/lib.rs
copy from third_party/rust/proc-macro2/src/stable.rs
copy to third_party/rust/proc-macro2-0.3.6/src/stable.rs
copy from third_party/rust/proc-macro2/src/strnom.rs
copy to third_party/rust/proc-macro2-0.3.6/src/strnom.rs
copy from third_party/rust/proc-macro2/src/unstable.rs
copy to third_party/rust/proc-macro2-0.3.6/src/unstable.rs
copy from third_party/rust/proc-macro2/tests/test.rs
copy to third_party/rust/proc-macro2-0.3.6/tests/test.rs
--- a/third_party/rust/proc-macro2/.cargo-checksum.json
+++ b/third_party/rust/proc-macro2/.cargo-checksum.json
@@ -1,1 +1,1 @@
-{"files":{".travis.yml":"872a0d195dcb1e84f28aa994f301c7139f70360bb42dee3954df5ee965efea15","Cargo.toml":"6ed5d7b9bf8805abd76f9e2a9be99b98e2cb70d9b97980b8aa09b6082d26a94d","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"ce05336717e1e90724491a2f54487c41c752fa2d32396639439f7c6d0f1e6776","src/lib.rs":"e99fedcb4b410c626fe1a3ab722c8b4f98baed2c64c2dff28c4eb62da354f2e2","src/stable.rs":"fd8d86f7542d211030056a7cdcc58b86131180d54f461910a4a067269eee9d4a","src/strnom.rs":"129fe22f0b50e5a64fca82e731c959135381c910e19f3305ef35420e0aadde08","src/unstable.rs":"b43c713ac16d9de0ba0fa1b9bebe390122b4ad60ef2fc75408f721305fdcd46b","tests/test.rs":"a8229931093cd6b39f759c60ef097e59bc43c98f1b0e5eea06ecc8d5d0879853"},"package":"49b6a521dc81b643e9a51e0d1cf05df46d5a2f3c0280ea72bcb68276ba64a118"}
\ No newline at end of file
+{"files":{".travis.yml":"a7e89030a6a25e881fd6ccc0f065e2990eb408e26a9cbdcdf7a73b201285ab0f","Cargo.toml":"a07d0acb6f3035bbd30180e3493f8ad05f5e8ceed8970e00ee3e3ce4c3427a0a","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"261fb7bbe050bbff8a8e33da68926b44cd1bbd2b1e8b655d19ae681b8fff3c6e","src/lib.rs":"8fa6ba7df93c3ee57163f406f73fb5efa2b41709b7d27d25a46629038cabf339","src/stable.rs":"c325eadc1f0a78c55117589e6bacb72dd295ccd02cb3e2dea13e1381ad2e972e","src/strnom.rs":"807c377bdb49b8b1c67d013089b8ff33fe93ffd3fa36b6440dbb1d6fe8cd9c17","src/unstable.rs":"69ce792a9d8a9caeb43f598923f34c986628f3416355e118f256263afe870e13","tests/test.rs":"40486961d171ea6312cf46c63834738e2bec07fee9badb677ffa28073cc09e8d"},"package":"effdb53b25cdad54f8f48843d67398f7ef2e14f12c1b4cb4effc549a6462a4d6"}
\ No newline at end of file
--- a/third_party/rust/proc-macro2/.travis.yml
+++ b/third_party/rust/proc-macro2/.travis.yml
@@ -2,31 +2,26 @@ language: rust
 sudo: false
 
 matrix:
   include:
     - rust: 1.15.0
     - rust: stable
     - rust: beta
     - rust: nightly
-      before_script:
-        - pip install 'travis-cargo<0.2' --user && export PATH=$HOME/.local/bin:$PATH
       script:
         - cargo test
-        - cargo build --features nightly
-        - cargo build --no-default-features
+        - cargo test --features nightly
+        - cargo test --no-default-features
         - RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test
-        - RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build --features nightly
-        - RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo doc --no-deps
-      after_success:
-        - travis-cargo --only nightly doc-upload
+        - RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test --features nightly
+        - cargo update -Z minimal-versions && cargo build
+
+before_script:
+  - set -o errexit
 
 script:
   - cargo test
   - RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo test
-env:
-  global:
-    - TRAVIS_CARGO_NIGHTLY_FEATURE=""
-    - secure: "NAsZghAVTAksrm4WP4I66VmD2wW0eRbwB+ZKHUQfvbgUaCRvVdp4WBbWXGU/f/yHgDFWZwljWR4iPMiBwAK8nZsQFRuLFdHrOOHqbkj639LLdT9A07s1zLMB1GfR1fDttzrGhm903pbT2yxSyqqpahGYM7TaGDYYmKYIk4XyVNA5F5Sk7RI+rCecKraoYDeUEFbjWWYtU2FkEXsELEKj0emX5reWkR+wja3QokFcRZ25+Zd2dRC0K8W5QcY2UokLzKncBMCTC5q70H616S3r/9qW67Si1njsJ7RzP0NlZQUNQ/VCvwr4LCr9w+AD9i1SZtXxuux77tWEWSJvBzUc82dDMUv/floJuF7HTulSxxQoRm+fbzpXj9mgaJNiUHXru6ZRTCRVRUSXpcAco94bVoy/jnjrTe3jgAIZK5w14zA8yLw1Jxof31DlbcWORxgF+6fnY2nKPRN2oiQ50+jm1AuGDZX59/wMiu1QlkjOBHtikHp+u+7mp3SkkM04DvuQ/tWODQQnOOtrA0EB3i5H1zeTSnUcmbJufUljWWOvF1QYII08MccqwfG1KWbpobvdu+cV2iVhkq/lNCEL3Ai101CnmSCnMz+9oK/XxYOrx2TnaD9ootOKgnk7XWxF19GZecQx6O2hHTouxvB/0KcRPGWmMWl0H88f3T/Obql8bG8="
 
 notifications:
   email:
     on_success: never
--- a/third_party/rust/proc-macro2/Cargo.toml
+++ b/third_party/rust/proc-macro2/Cargo.toml
@@ -7,25 +7,27 @@
 #
 # If you believe there's an error in this file please file an
 # issue against the rust-lang/cargo repository. If you're
 # editing this file be aware that the upstream Cargo.toml
 # will likely look very different (and much more reasonable)
 
 [package]
 name = "proc-macro2"
-version = "0.3.6"
+version = "0.4.6"
 authors = ["Alex Crichton <alex@alexcrichton.com>"]
 description = "A stable implementation of the upcoming new `proc_macro` API. Comes with an\noption, off by default, to also reimplement itself in terms of the upstream\nunstable API.\n"
 homepage = "https://github.com/alexcrichton/proc-macro2"
 documentation = "https://docs.rs/proc-macro2"
 readme = "README.md"
 keywords = ["macros"]
 license = "MIT/Apache-2.0"
 repository = "https://github.com/alexcrichton/proc-macro2"
+[package.metadata.docs.rs]
+rustdoc-args = ["--cfg", "procmacro2_semver_exempt"]
 
 [lib]
 doctest = false
 [dependencies.unicode-xid]
 version = "0.1"
 
 [features]
 default = ["proc-macro"]
--- a/third_party/rust/proc-macro2/README.md
+++ b/third_party/rust/proc-macro2/README.md
@@ -12,27 +12,27 @@ interface][upcoming].
 
 The upcoming support has features like:
 
 * Span information on tokens
 * No need to go in/out through strings
 * Structured input/output
 
 The hope is that libraries ported to `proc_macro2` will be trivial to port to
-the real `proc_macro` crate once the support on nightly is stabilize.
+the real `proc_macro` crate once the support on nightly is stabilized.
 
 ## Usage
 
 This crate by default compiles on the stable version of the compiler. It only
 uses the stable surface area of the `proc_macro` crate upstream in the compiler
 itself. Usage is done via:
 
 ```toml
 [dependencies]
-proc-macro2 = "0.3"
+proc-macro2 = "0.4"
 ```
 
 followed by
 
 ```rust
 extern crate proc_macro;
 extern crate proc_macro2;
 
@@ -52,17 +52,17 @@ If you'd like you can enable the `nightl
 cause it to compile against the **unstable and nightly-only** features of the
 `proc_macro` crate. This in turn requires a nightly compiler. This should help
 preserve span information, however, coming in from the compiler itself.
 
 You can enable this feature via:
 
 ```toml
 [dependencies]
-proc-macro2 = { version = "0.3", features = ["nightly"] }
+proc-macro2 = { version = "0.4", features = ["nightly"] }
 ```
 
 
 ## Unstable Features
 
 `proc-macro2` supports exporting some methods from `proc_macro` which are
 currently highly unstable, and may not be stabilized in the first pass of
 `proc_macro` stabilizations. These features are not exported by default. Minor
--- a/third_party/rust/proc-macro2/src/lib.rs
+++ b/third_party/rust/proc-macro2/src/lib.rs
@@ -15,74 +15,122 @@
 //! to use this crate will be trivially able to switch to the upstream
 //! `proc_macro` crate once its API stabilizes.
 //!
 //! In the meantime this crate also has a `nightly` Cargo feature which
 //! enables it to reimplement itself with the unstable API of [`proc_macro`].
 //! This'll allow immediate usage of the beneficial upstream API, particularly
 //! around preserving span information.
 //!
+//! # Unstable Features
+//!
+//! `proc-macro2` supports exporting some methods from `proc_macro` which are
+//! currently highly unstable, and may not be stabilized in the first pass of
+//! `proc_macro` stabilizations. These features are not exported by default.
+//! Minor versions of `proc-macro2` may make breaking changes to them at any
+//! time.
+//!
+//! To enable these features, the `procmacro2_semver_exempt` config flag must be
+//! passed to rustc.
+//!
+//! ```sh
+//! RUSTFLAGS='--cfg procmacro2_semver_exempt' cargo build
+//! ```
+//!
+//! Note that this must not only be done for your crate, but for any crate that
+//! depends on your crate. This infectious nature is intentional, as it serves
+//! as a reminder that you are outside of the normal semver guarantees.
+//!
 //! [`proc_macro`]: https://doc.rust-lang.org/proc_macro/
 //! [ts]: https://doc.rust-lang.org/proc_macro/struct.TokenStream.html
 
 // Proc-macro2 types in rustdoc of other crates get linked to here.
-#![doc(html_root_url = "https://docs.rs/proc-macro2/0.3.6")]
+#![doc(html_root_url = "https://docs.rs/proc-macro2/0.4.6")]
 #![cfg_attr(feature = "nightly", feature(proc_macro))]
 
 #[cfg(feature = "proc-macro")]
 extern crate proc_macro;
-
-#[cfg(not(feature = "nightly"))]
 extern crate unicode_xid;
 
+use std::cmp::Ordering;
 use std::fmt;
+use std::hash::{Hash, Hasher};
 use std::iter::FromIterator;
 use std::marker;
 use std::rc::Rc;
 use std::str::FromStr;
 
 #[macro_use]
-#[cfg(not(feature = "nightly"))]
 mod strnom;
+mod stable;
 
-#[path = "stable.rs"]
 #[cfg(not(feature = "nightly"))]
-mod imp;
+use stable as imp;
 #[path = "unstable.rs"]
 #[cfg(feature = "nightly")]
 mod imp;
 
+/// An abstract stream of tokens, or more concretely a sequence of token trees.
+///
+/// This type provides interfaces for iterating over token trees and for
+/// collecting token trees into one stream.
+///
+/// Token stream is both the input and output of `#[proc_macro]`,
+/// `#[proc_macro_attribute]` and `#[proc_macro_derive]` definitions.
 #[derive(Clone)]
 pub struct TokenStream {
     inner: imp::TokenStream,
     _marker: marker::PhantomData<Rc<()>>,
 }
 
+/// Error returned from `TokenStream::from_str`.
 pub struct LexError {
     inner: imp::LexError,
     _marker: marker::PhantomData<Rc<()>>,
 }
 
 impl TokenStream {
     fn _new(inner: imp::TokenStream) -> TokenStream {
         TokenStream {
             inner: inner,
             _marker: marker::PhantomData,
         }
     }
 
-    pub fn empty() -> TokenStream {
-        TokenStream::_new(imp::TokenStream::empty())
+    fn _new_stable(inner: stable::TokenStream) -> TokenStream {
+        TokenStream {
+            inner: inner.into(),
+            _marker: marker::PhantomData,
+        }
     }
 
+    /// Returns an empty `TokenStream` containing no token trees.
+    pub fn new() -> TokenStream {
+        TokenStream::_new(imp::TokenStream::new())
+    }
+
+    #[deprecated(since = "0.4.4", note = "please use TokenStream::new")]
+    pub fn empty() -> TokenStream {
+        TokenStream::new()
+    }
+
+    /// Checks if this `TokenStream` is empty.
     pub fn is_empty(&self) -> bool {
         self.inner.is_empty()
     }
 }
 
+/// Attempts to break the string into tokens and parse those tokens into a token
+/// stream.
+///
+/// May fail for a number of reasons, for example, if the string contains
+/// unbalanced delimiters or characters not existing in the language.
+///
+/// NOTE: Some errors may cause panics instead of returning `LexError`. We
+/// reserve the right to change these errors into `LexError`s later.
 impl FromStr for TokenStream {
     type Err = LexError;
 
     fn from_str(src: &str) -> Result<TokenStream, LexError> {
         let e = src.parse().map_err(|e| LexError {
             inner: e,
             _marker: marker::PhantomData,
         })?;
@@ -99,55 +147,84 @@ impl From<proc_macro::TokenStream> for T
 
 #[cfg(feature = "proc-macro")]
 impl From<TokenStream> for proc_macro::TokenStream {
     fn from(inner: TokenStream) -> proc_macro::TokenStream {
         inner.inner.into()
     }
 }
 
+impl Extend<TokenTree> for TokenStream {
+    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
+        self.inner.extend(streams)
+    }
+}
+
+/// Collects a number of token trees into a single stream.
 impl FromIterator<TokenTree> for TokenStream {
     fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
         TokenStream::_new(streams.into_iter().collect())
     }
 }
 
+/// Prints the token stream as a string that is supposed to be losslessly
+/// convertible back into the same token stream (modulo spans), except for
+/// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
+/// numeric literals.
 impl fmt::Display for TokenStream {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         self.inner.fmt(f)
     }
 }
 
+/// Prints token in a form convenient for debugging.
 impl fmt::Debug for TokenStream {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         self.inner.fmt(f)
     }
 }
 
 impl fmt::Debug for LexError {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         self.inner.fmt(f)
     }
 }
 
 // Returned by reference, so we can't easily wrap it.
 #[cfg(procmacro2_semver_exempt)]
 pub use imp::FileName;
 
+/// The source file of a given `Span`.
+///
+/// This type is semver exempt and not exposed by default.
 #[cfg(procmacro2_semver_exempt)]
 #[derive(Clone, PartialEq, Eq)]
 pub struct SourceFile(imp::SourceFile);
 
 #[cfg(procmacro2_semver_exempt)]
 impl SourceFile {
-    /// Get the path to this source file as a string.
+    /// Get the path to this source file.
+    ///
+    /// ### Note
+    ///
+    /// If the code span associated with this `SourceFile` was generated by an
+    /// external macro, this may not be an actual path on the filesystem. Use
+    /// [`is_real`] to check.
+    ///
+    /// Also note that even if `is_real` returns `true`, if
+    /// `--remap-path-prefix` was passed on the command line, the path as given
+    /// may not actually be valid.
+    ///
+    /// [`is_real`]: #method.is_real
     pub fn path(&self) -> &FileName {
         self.0.path()
     }
 
+    /// Returns `true` if this source file is a real source file, and not
+    /// generated by an external macro's expansion.
     pub fn is_real(&self) -> bool {
         self.0.is_real()
     }
 }
 
 #[cfg(procmacro2_semver_exempt)]
 impl AsRef<FileName> for SourceFile {
     fn as_ref(&self) -> &FileName {
@@ -157,337 +234,708 @@ impl AsRef<FileName> for SourceFile {
 
 #[cfg(procmacro2_semver_exempt)]
 impl fmt::Debug for SourceFile {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         self.0.fmt(f)
     }
 }
 
+/// A line-column pair representing the start or end of a `Span`.
+///
+/// This type is semver exempt and not exposed by default.
 #[cfg(procmacro2_semver_exempt)]
 pub struct LineColumn {
+    /// The 1-indexed line in the source file on which the span starts or ends
+    /// (inclusive).
     pub line: usize,
+    /// The 0-indexed column (in UTF-8 characters) in the source file on which
+    /// the span starts or ends (inclusive).
     pub column: usize,
 }
 
+/// A region of source code, along with macro expansion information.
 #[derive(Copy, Clone)]
 pub struct Span {
     inner: imp::Span,
     _marker: marker::PhantomData<Rc<()>>,
 }
 
 impl Span {
     fn _new(inner: imp::Span) -> Span {
         Span {
             inner: inner,
             _marker: marker::PhantomData,
         }
     }
 
+    fn _new_stable(inner: stable::Span) -> Span {
+        Span {
+            inner: inner.into(),
+            _marker: marker::PhantomData,
+        }
+    }
+
+    /// The span of the invocation of the current procedural macro.
+    ///
+    /// Identifiers created with this span will be resolved as if they were
+    /// written directly at the macro call location (call-site hygiene) and
+    /// other code at the macro call site will be able to refer to them as well.
     pub fn call_site() -> Span {
         Span::_new(imp::Span::call_site())
     }
 
+    /// A span that resolves at the macro definition site.
+    ///
+    /// This method is semver exempt and not exposed by default.
     #[cfg(procmacro2_semver_exempt)]
     pub fn def_site() -> Span {
         Span::_new(imp::Span::def_site())
     }
 
     /// Creates a new span with the same line/column information as `self` but
     /// that resolves symbols as though it were at `other`.
+    ///
+    /// This method is semver exempt and not exposed by default.
     #[cfg(procmacro2_semver_exempt)]
     pub fn resolved_at(&self, other: Span) -> Span {
         Span::_new(self.inner.resolved_at(other.inner))
     }
 
     /// Creates a new span with the same name resolution behavior as `self` but
     /// with the line/column information of `other`.
+    ///
+    /// This method is semver exempt and not exposed by default.
     #[cfg(procmacro2_semver_exempt)]
     pub fn located_at(&self, other: Span) -> Span {
         Span::_new(self.inner.located_at(other.inner))
     }
 
     /// This method is only available when the `"nightly"` feature is enabled.
     #[cfg(all(feature = "nightly", feature = "proc-macro"))]
     pub fn unstable(self) -> proc_macro::Span {
         self.inner.unstable()
     }
 
+    /// The original source file into which this span points.
+    ///
+    /// This method is semver exempt and not exposed by default.
     #[cfg(procmacro2_semver_exempt)]
     pub fn source_file(&self) -> SourceFile {
         SourceFile(self.inner.source_file())
     }
 
+    /// Get the starting line/column in the source file for this span.
+    ///
+    /// This method is semver exempt and not exposed by default.
     #[cfg(procmacro2_semver_exempt)]
     pub fn start(&self) -> LineColumn {
         let imp::LineColumn { line, column } = self.inner.start();
         LineColumn {
             line: line,
             column: column,
         }
     }
 
+    /// Get the ending line/column in the source file for this span.
+    ///
+    /// This method is semver exempt and not exposed by default.
     #[cfg(procmacro2_semver_exempt)]
     pub fn end(&self) -> LineColumn {
         let imp::LineColumn { line, column } = self.inner.end();
         LineColumn {
             line: line,
             column: column,
         }
     }
 
+    /// Create a new span encompassing `self` and `other`.
+    ///
+    /// Returns `None` if `self` and `other` are from different files.
+    ///
+    /// This method is semver exempt and not exposed by default.
     #[cfg(procmacro2_semver_exempt)]
     pub fn join(&self, other: Span) -> Option<Span> {
         self.inner.join(other.inner).map(Span::_new)
     }
 
+    /// Compares two spans to see if they're equal.
+    ///
+    /// This method is semver exempt and not exposed by default.
     #[cfg(procmacro2_semver_exempt)]
     pub fn eq(&self, other: &Span) -> bool {
         self.inner.eq(&other.inner)
     }
 }
 
+/// Prints a span in a form convenient for debugging.
 impl fmt::Debug for Span {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         self.inner.fmt(f)
     }
 }
 
-#[derive(Clone, Debug)]
+/// A single token or a delimited sequence of token trees (e.g. `[1, (), ..]`).
+#[derive(Clone)]
 pub enum TokenTree {
+    /// A token stream surrounded by bracket delimiters.
     Group(Group),
-    Term(Term),
-    Op(Op),
+    /// An identifier.
+    Ident(Ident),
+    /// A single punctuation character (`+`, `,`, `$`, etc.).
+    Punct(Punct),
+    /// A literal character (`'a'`), string (`"hello"`), number (`2.3`), etc.
     Literal(Literal),
 }
 
 impl TokenTree {
+    /// Returns the span of this tree, delegating to the `span` method of
+    /// the contained token or a delimited stream.
     pub fn span(&self) -> Span {
         match *self {
             TokenTree::Group(ref t) => t.span(),
-            TokenTree::Term(ref t) => t.span(),
-            TokenTree::Op(ref t) => t.span(),
+            TokenTree::Ident(ref t) => t.span(),
+            TokenTree::Punct(ref t) => t.span(),
             TokenTree::Literal(ref t) => t.span(),
         }
     }
 
+    /// Configures the span for *only this token*.
+    ///
+    /// Note that if this token is a `Group` then this method will not configure
+    /// the span of each of the internal tokens, this will simply delegate to
+    /// the `set_span` method of each variant.
     pub fn set_span(&mut self, span: Span) {
         match *self {
             TokenTree::Group(ref mut t) => t.set_span(span),
-            TokenTree::Term(ref mut t) => t.set_span(span),
-            TokenTree::Op(ref mut t) => t.set_span(span),
+            TokenTree::Ident(ref mut t) => t.set_span(span),
+            TokenTree::Punct(ref mut t) => t.set_span(span),
             TokenTree::Literal(ref mut t) => t.set_span(span),
         }
     }
 }
 
 impl From<Group> for TokenTree {
     fn from(g: Group) -> TokenTree {
         TokenTree::Group(g)
     }
 }
 
-impl From<Term> for TokenTree {
-    fn from(g: Term) -> TokenTree {
-        TokenTree::Term(g)
+impl From<Ident> for TokenTree {
+    fn from(g: Ident) -> TokenTree {
+        TokenTree::Ident(g)
     }
 }
 
-impl From<Op> for TokenTree {
-    fn from(g: Op) -> TokenTree {
-        TokenTree::Op(g)
+impl From<Punct> for TokenTree {
+    fn from(g: Punct) -> TokenTree {
+        TokenTree::Punct(g)
     }
 }
 
 impl From<Literal> for TokenTree {
     fn from(g: Literal) -> TokenTree {
         TokenTree::Literal(g)
     }
 }
 
+/// Prints the token tree as a string that is supposed to be losslessly
+/// convertible back into the same token tree (modulo spans), except for
+/// possibly `TokenTree::Group`s with `Delimiter::None` delimiters and negative
+/// numeric literals.
 impl fmt::Display for TokenTree {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match *self {
             TokenTree::Group(ref t) => t.fmt(f),
-            TokenTree::Term(ref t) => t.fmt(f),
-            TokenTree::Op(ref t) => t.fmt(f),
+            TokenTree::Ident(ref t) => t.fmt(f),
+            TokenTree::Punct(ref t) => t.fmt(f),
             TokenTree::Literal(ref t) => t.fmt(f),
         }
     }
 }
 
-#[derive(Clone, Debug)]
+/// Prints the token tree in a form convenient for debugging.
+impl fmt::Debug for TokenTree {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        // Each of these has the name in the struct type in the derived debug,
+        // so don't bother with an extra layer of indirection
+        match *self {
+            TokenTree::Group(ref t) => t.fmt(f),
+            TokenTree::Ident(ref t) => {
+                let mut debug = f.debug_struct("Ident");
+                debug.field("sym", &format_args!("{}", t));
+                #[cfg(any(feature = "nightly", procmacro2_semver_exempt))]
+                debug.field("span", &t.span());
+                debug.finish()
+            }
+            TokenTree::Punct(ref t) => t.fmt(f),
+            TokenTree::Literal(ref t) => t.fmt(f),
+        }
+    }
+}
+
+/// A delimited token stream.
+///
+/// A `Group` internally contains a `TokenStream` which is surrounded by
+/// `Delimiter`s.
+#[derive(Clone)]
 pub struct Group {
     delimiter: Delimiter,
     stream: TokenStream,
     span: Span,
 }
 
+/// Describes how a sequence of token trees is delimited.
 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
 pub enum Delimiter {
+    /// `( ... )`
     Parenthesis,
+    /// `{ ... }`
     Brace,
+    /// `[ ... ]`
     Bracket,
+    /// `Ø ... Ø`
+    ///
+    /// An implicit delimiter, that may, for example, appear around tokens
+    /// coming from a "macro variable" `$var`. It is important to preserve
+    /// operator priorities in cases like `$var * 3` where `$var` is `1 + 2`.
+    /// Implicit delimiters may not survive roundtrip of a token stream through
+    /// a string.
     None,
 }
 
 impl Group {
+    /// Creates a new `Group` with the given delimiter and token stream.
+    ///
+    /// This constructor will set the span for this group to
+    /// `Span::call_site()`. To change the span you can use the `set_span`
+    /// method below.
     pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
         Group {
             delimiter: delimiter,
             stream: stream,
             span: Span::call_site(),
         }
     }
 
+    /// Returns the delimiter of this `Group`
     pub fn delimiter(&self) -> Delimiter {
         self.delimiter
     }
 
+    /// Returns the `TokenStream` of tokens that are delimited in this `Group`.
+    ///
+    /// Note that the returned token stream does not include the delimiter
+    /// returned above.
     pub fn stream(&self) -> TokenStream {
         self.stream.clone()
     }
 
+    /// Returns the span for the delimiters of this token stream, spanning the
+    /// entire `Group`.
     pub fn span(&self) -> Span {
         self.span
     }
 
+    /// Configures the span for this `Group`'s delimiters, but not its internal
+    /// tokens.
+    ///
+    /// This method will **not** set the span of all the internal tokens spanned
+    /// by this group, but rather it will only set the span of the delimiter
+    /// tokens at the level of the `Group`.
     pub fn set_span(&mut self, span: Span) {
         self.span = span;
     }
 }
 
+/// Prints the group as a string that should be losslessly convertible back
+/// into the same group (modulo spans), except for possibly `TokenTree::Group`s
+/// with `Delimiter::None` delimiters.
 impl fmt::Display for Group {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.stream.fmt(f)
+        let (left, right) = match self.delimiter {
+            Delimiter::Parenthesis => ("(", ")"),
+            Delimiter::Brace => ("{", "}"),
+            Delimiter::Bracket => ("[", "]"),
+            Delimiter::None => ("", ""),
+        };
+
+        f.write_str(left)?;
+        self.stream.fmt(f)?;
+        f.write_str(right)?;
+
+        Ok(())
     }
 }
 
-#[derive(Copy, Clone, Debug)]
-pub struct Op {
+impl fmt::Debug for Group {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        let mut debug = fmt.debug_struct("Group");
+        debug.field("delimiter", &self.delimiter);
+        debug.field("stream", &self.stream);
+        #[cfg(procmacro2_semver_exempt)]
+        debug.field("span", &self.span);
+        debug.finish()
+    }
+}
+
+/// A `Punct` is a single punctuation character like `+`, `-` or `#`.
+///
+/// Multicharacter operators like `+=` are represented as two instances of
+/// `Punct` with different forms of `Spacing` returned.
+#[derive(Clone)]
+pub struct Punct {
     op: char,
     spacing: Spacing,
     span: Span,
 }
 
+/// Whether a `Punct` is followed immediately by another `Punct` or followed by
+/// another token or whitespace.
 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
 pub enum Spacing {
+    /// E.g. `+` is `Alone` in `+ =`, `+ident` or `+()`.
     Alone,
+    /// E.g. `+` is `Joint` in `+=` or `'#`.
+    ///
+    /// Additionally, single quote `'` can join with identifiers to form
+    /// lifetimes `'ident`.
     Joint,
 }
 
-impl Op {
-    pub fn new(op: char, spacing: Spacing) -> Op {
-        Op {
+impl Punct {
+    /// Creates a new `Punct` from the given character and spacing.
+    ///
+    /// The `ch` argument must be a valid punctuation character permitted by the
+    /// language, otherwise the function will panic.
+    ///
+    /// The returned `Punct` will have the default span of `Span::call_site()`
+    /// which can be further configured with the `set_span` method below.
+    pub fn new(op: char, spacing: Spacing) -> Punct {
+        Punct {
             op: op,
             spacing: spacing,
             span: Span::call_site(),
         }
     }
 
-    pub fn op(&self) -> char {
+    /// Returns the value of this punctuation character as `char`.
+    pub fn as_char(&self) -> char {
         self.op
     }
 
+    /// Returns the spacing of this punctuation character, indicating whether
+    /// it's immediately followed by another `Punct` in the token stream, so
+    /// they can potentially be combined into a multicharacter operator
+    /// (`Joint`), or it's followed by some other token or whitespace (`Alone`)
+    /// so the operator has certainly ended.
     pub fn spacing(&self) -> Spacing {
         self.spacing
     }
 
+    /// Returns the span for this punctuation character.
     pub fn span(&self) -> Span {
         self.span
     }
 
+    /// Configure the span for this punctuation character.
     pub fn set_span(&mut self, span: Span) {
         self.span = span;
     }
 }
 
-impl fmt::Display for Op {
+/// Prints the punctuation character as a string that should be losslessly
+/// convertible back into the same character.
+impl fmt::Display for Punct {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         self.op.fmt(f)
     }
 }
 
-#[derive(Copy, Clone)]
-pub struct Term {
-    inner: imp::Term,
+impl fmt::Debug for Punct {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        let mut debug = fmt.debug_struct("Punct");
+        debug.field("op", &self.op);
+        debug.field("spacing", &self.spacing);
+        #[cfg(procmacro2_semver_exempt)]
+        debug.field("span", &self.span);
+        debug.finish()
+    }
+}
+
+/// A word of Rust code, which may be a keyword or legal variable name.
+///
+/// An identifier consists of at least one Unicode code point, the first of
+/// which has the XID_Start property and the rest of which have the XID_Continue
+/// property.
+///
+/// - The empty string is not an identifier. Use `Option<Ident>`.
+/// - A lifetime is not an identifier. Use `syn::Lifetime` instead.
+///
+/// An identifier constructed with `Ident::new` is permitted to be a Rust
+/// keyword, though parsing one through its [`Synom`] implementation rejects
+/// Rust keywords. Use `call!(Ident::parse_any)` when parsing to match the
+/// behaviour of `Ident::new`.
+///
+/// [`Synom`]: https://docs.rs/syn/0.14/syn/synom/trait.Synom.html
+///
+/// # Examples
+///
+/// A new ident can be created from a string using the `Ident::new` function.
+/// A span must be provided explicitly which governs the name resolution
+/// behavior of the resulting identifier.
+///
+/// ```rust
+/// extern crate proc_macro2;
+///
+/// use proc_macro2::{Ident, Span};
+///
+/// fn main() {
+///     let call_ident = Ident::new("calligraphy", Span::call_site());
+///
+///     println!("{}", call_ident);
+/// }
+/// ```
+///
+/// An ident can be interpolated into a token stream using the `quote!` macro.
+///
+/// ```rust
+/// #[macro_use]
+/// extern crate quote;
+///
+/// extern crate proc_macro2;
+///
+/// use proc_macro2::{Ident, Span};
+///
+/// fn main() {
+///     let ident = Ident::new("demo", Span::call_site());
+///
+///     // Create a variable binding whose name is this ident.
+///     let expanded = quote! { let #ident = 10; };
+///
+///     // Create a variable binding with a slightly different name.
+///     let temp_ident = Ident::new(&format!("new_{}", ident), Span::call_site());
+///     let expanded = quote! { let #temp_ident = 10; };
+/// }
+/// ```
+///
+/// A string representation of the ident is available through the `to_string()`
+/// method.
+///
+/// ```rust
+/// # extern crate proc_macro2;
+/// #
+/// # use proc_macro2::{Ident, Span};
+/// #
+/// # let ident = Ident::new("another_identifier", Span::call_site());
+/// #
+/// // Examine the ident as a string.
+/// let ident_string = ident.to_string();
+/// if ident_string.len() > 60 {
+///     println!("Very long identifier: {}", ident_string)
+/// }
+/// ```
+#[derive(Clone)]
+pub struct Ident {
+    inner: imp::Ident,
     _marker: marker::PhantomData<Rc<()>>,
 }
 
-impl Term {
-    fn _new(inner: imp::Term) -> Term {
-        Term {
+impl Ident {
+    fn _new(inner: imp::Ident) -> Ident {
+        Ident {
             inner: inner,
             _marker: marker::PhantomData,
         }
     }
 
-    pub fn new(string: &str, span: Span) -> Term {
-        Term::_new(imp::Term::new(string, span.inner))
+    /// Creates a new `Ident` with the given `string` as well as the specified
+    /// `span`.
+    ///
+    /// The `string` argument must be a valid identifier permitted by the
+    /// language, otherwise the function will panic.
+    ///
+    /// Note that `span`, currently in rustc, configures the hygiene information
+    /// for this identifier.
+    ///
+    /// As of this time `Span::call_site()` explicitly opts-in to "call-site"
+    /// hygiene meaning that identifiers created with this span will be resolved
+    /// as if they were written directly at the location of the macro call, and
+    /// other code at the macro call site will be able to refer to them as well.
+    ///
+    /// Later spans like `Span::def_site()` will allow to opt-in to
+    /// "definition-site" hygiene meaning that identifiers created with this
+    /// span will be resolved at the location of the macro definition and other
+    /// code at the macro call site will not be able to refer to them.
+    ///
+    /// Due to the current importance of hygiene this constructor, unlike other
+    /// tokens, requires a `Span` to be specified at construction.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the input string is neither a keyword nor a legal variable
+    /// name.
+    pub fn new(string: &str, span: Span) -> Ident {
+        Ident::_new(imp::Ident::new(string, span.inner))
     }
 
-    pub fn as_str(&self) -> &str {
-        self.inner.as_str()
+    /// Same as `Ident::new`, but creates a raw identifier (`r#ident`).
+    ///
+    /// This method is semver exempt and not exposed by default.
+    #[cfg(procmacro2_semver_exempt)]
+    pub fn new_raw(string: &str, span: Span) -> Ident {
+        Ident::_new_raw(string, span)
     }
 
+    fn _new_raw(string: &str, span: Span) -> Ident {
+        Ident::_new(imp::Ident::new_raw(string, span.inner))
+    }
+
+    /// Returns the span of this `Ident`.
     pub fn span(&self) -> Span {
         Span::_new(self.inner.span())
     }
 
+    /// Configures the span of this `Ident`, possibly changing its hygiene
+    /// context.
     pub fn set_span(&mut self, span: Span) {
         self.inner.set_span(span.inner);
     }
 }
 
-impl fmt::Display for Term {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.as_str().fmt(f)
+impl PartialEq for Ident {
+    fn eq(&self, other: &Ident) -> bool {
+        self.to_string() == other.to_string()
+    }
+}
+
+impl<T> PartialEq<T> for Ident
+where
+    T: ?Sized + AsRef<str>,
+{
+    fn eq(&self, other: &T) -> bool {
+        self.to_string() == other.as_ref()
     }
 }
 
-impl fmt::Debug for Term {
+impl Eq for Ident {}
+
+impl PartialOrd for Ident {
+    fn partial_cmp(&self, other: &Ident) -> Option<Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+impl Ord for Ident {
+    fn cmp(&self, other: &Ident) -> Ordering {
+        self.to_string().cmp(&other.to_string())
+    }
+}
+
+impl Hash for Ident {
+    fn hash<H: Hasher>(&self, hasher: &mut H) {
+        self.to_string().hash(hasher)
+    }
+}
+
+/// Prints the identifier as a string that should be losslessly convertible back
+/// into the same identifier.
+impl fmt::Display for Ident {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         self.inner.fmt(f)
     }
 }
 
+impl fmt::Debug for Ident {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.inner.fmt(f)
+    }
+}
+
+/// A literal string (`"hello"`), byte string (`b"hello"`), character (`'a'`),
+/// byte character (`b'a'`), an integer or floating point number with or without
+/// a suffix (`1`, `1u8`, `2.3`, `2.3f32`).
+///
+/// Boolean literals like `true` and `false` do not belong here, they are
+/// `Ident`s.
 #[derive(Clone)]
 pub struct Literal {
     inner: imp::Literal,
     _marker: marker::PhantomData<Rc<()>>,
 }
 
-macro_rules! int_literals {
+macro_rules! suffixed_int_literals {
     ($($name:ident => $kind:ident,)*) => ($(
+        /// Creates a new suffixed integer literal with the specified value.
+        ///
+        /// This function will create an integer like `1u32` where the integer
+        /// value specified is the first part of the token and the integral is
+        /// also suffixed at the end. Literals created from negative numbers may
+        /// not survive roundtrips through `TokenStream` or strings and may be
+        /// broken into two tokens (`-` and positive literal).
+        ///
+        /// Literals created through this method have the `Span::call_site()`
+        /// span by default, which can be configured with the `set_span` method
+        /// below.
+        pub fn $name(n: $kind) -> Literal {
+            Literal::_new(imp::Literal::$name(n))
+        }
+    )*)
+}
+
+macro_rules! unsuffixed_int_literals {
+    ($($name:ident => $kind:ident,)*) => ($(
+        /// Creates a new unsuffixed integer literal with the specified value.
+        ///
+        /// This function will create an integer like `1` where the integer
+        /// value specified is the first part of the token. No suffix is
+        /// specified on this token, meaning that invocations like
+        /// `Literal::i8_unsuffixed(1)` are equivalent to
+        /// `Literal::u32_unsuffixed(1)`. Literals created from negative numbers
+        /// may not survive roundtrips through `TokenStream` or strings and may
+        /// be broken into two tokens (`-` and positive literal).
+        ///
+        /// Literals created through this method have the `Span::call_site()`
+        /// span by default, which can be configured with the `set_span` method
+        /// below.
         pub fn $name(n: $kind) -> Literal {
             Literal::_new(imp::Literal::$name(n))
         }
     )*)
 }
 
 impl Literal {
     fn _new(inner: imp::Literal) -> Literal {
         Literal {
             inner: inner,
             _marker: marker::PhantomData,
         }
     }
 
-    int_literals! {
+    fn _new_stable(inner: stable::Literal) -> Literal {
+        Literal {
+            inner: inner.into(),
+            _marker: marker::PhantomData,
+        }
+    }
+
+    suffixed_int_literals! {
         u8_suffixed => u8,
         u16_suffixed => u16,
         u32_suffixed => u32,
         u64_suffixed => u64,
         usize_suffixed => usize,
         i8_suffixed => i8,
         i16_suffixed => i16,
         i32_suffixed => i32,
         i64_suffixed => i64,
         isize_suffixed => isize,
+    }
 
+    unsuffixed_int_literals! {
         u8_unsuffixed => u8,
         u16_unsuffixed => u16,
         u32_unsuffixed => u32,
         u64_unsuffixed => u64,
         usize_unsuffixed => usize,
         i8_unsuffixed => i8,
         i16_unsuffixed => i16,
         i32_unsuffixed => i32,
@@ -500,16 +948,29 @@ impl Literal {
         Literal::_new(imp::Literal::f64_unsuffixed(f))
     }
 
     pub fn f64_suffixed(f: f64) -> Literal {
         assert!(f.is_finite());
         Literal::_new(imp::Literal::f64_suffixed(f))
     }
 
+    /// Creates a new unsuffixed floating-point literal.
+    ///
+    /// This constructor is similar to those like `Literal::i8_unsuffixed` where
+    /// the float's value is emitted directly into the token but no suffix is
+    /// used, so it may be inferred to be a `f64` later in the compiler.
+/// Literals created from negative numbers may not survive roundtrips through
+    /// `TokenStream` or strings and may be broken into two tokens (`-` and
+    /// positive literal).
+    ///
+    /// # Panics
+    ///
+    /// This function requires that the specified float is finite, for example
+    /// if it is infinity or NaN this function will panic.
     pub fn f32_unsuffixed(f: f32) -> Literal {
         assert!(f.is_finite());
         Literal::_new(imp::Literal::f32_unsuffixed(f))
     }
 
     pub fn f32_suffixed(f: f32) -> Literal {
         assert!(f.is_finite());
         Literal::_new(imp::Literal::f32_suffixed(f))
@@ -543,25 +1004,30 @@ impl fmt::Debug for Literal {
 }
 
 impl fmt::Display for Literal {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         self.inner.fmt(f)
     }
 }
 
+/// Public implementation details for the `TokenStream` type, such as iterators.
 pub mod token_stream {
     use std::fmt;
     use std::marker;
     use std::rc::Rc;
 
+    use imp;
     pub use TokenStream;
     use TokenTree;
-    use imp;
 
+    /// An iterator over `TokenStream`'s `TokenTree`s.
+    ///
+    /// The iteration is "shallow", e.g. the iterator doesn't recurse into
+    /// delimited groups, and returns whole groups as token trees.
     pub struct IntoIter {
         inner: imp::TokenTreeIter,
         _marker: marker::PhantomData<Rc<()>>,
     }
 
     impl Iterator for IntoIter {
         type Item = TokenTree;
 
--- a/third_party/rust/proc-macro2/src/stable.rs
+++ b/third_party/rust/proc-macro2/src/stable.rs
@@ -1,36 +1,34 @@
 #![cfg_attr(not(procmacro2_semver_exempt), allow(dead_code))]
 
-use std::borrow::Borrow;
+#[cfg(procmacro2_semver_exempt)]
 use std::cell::RefCell;
 #[cfg(procmacro2_semver_exempt)]
 use std::cmp;
-use std::collections::HashMap;
 use std::fmt;
 use std::iter;
-use std::rc::Rc;
 use std::str::FromStr;
 use std::vec;
 
 use strnom::{block_comment, skip_whitespace, whitespace, word_break, Cursor, PResult};
 use unicode_xid::UnicodeXID;
 
-use {Delimiter, Group, Op, Spacing, TokenTree};
+use {Delimiter, Group, Punct, Spacing, TokenTree};
 
-#[derive(Clone, Debug)]
+#[derive(Clone)]
 pub struct TokenStream {
     inner: Vec<TokenTree>,
 }
 
 #[derive(Debug)]
 pub struct LexError;
 
 impl TokenStream {
-    pub fn empty() -> TokenStream {
+    pub fn new() -> TokenStream {
         TokenStream { inner: Vec::new() }
     }
 
     pub fn is_empty(&self) -> bool {
         self.inner.len() == 0
     }
 }
 
@@ -60,17 +58,17 @@ impl FromStr for TokenStream {
         // Create a dummy file & add it to the codemap
         let cursor = get_cursor(src);
 
         match token_stream(cursor) {
             Ok((input, output)) => {
                 if skip_whitespace(input).len() != 0 {
                     Err(LexError)
                 } else {
-                    Ok(output.inner)
+                    Ok(output)
                 }
             }
             Err(LexError) => Err(LexError),
         }
     }
 }
 
 impl fmt::Display for TokenStream {
@@ -84,38 +82,45 @@ impl fmt::Display for TokenStream {
             match *tt {
                 TokenTree::Group(ref tt) => {
                     let (start, end) = match tt.delimiter() {
                         Delimiter::Parenthesis => ("(", ")"),
                         Delimiter::Brace => ("{", "}"),
                         Delimiter::Bracket => ("[", "]"),
                         Delimiter::None => ("", ""),
                     };
-                    if tt.stream().inner.inner.len() == 0 {
+                    if tt.stream().into_iter().next().is_none() {
                         write!(f, "{} {}", start, end)?
                     } else {
                         write!(f, "{} {} {}", start, tt.stream(), end)?
                     }
                 }
-                TokenTree::Term(ref tt) => write!(f, "{}", tt.as_str())?,
-                TokenTree::Op(ref tt) => {
-                    write!(f, "{}", tt.op())?;
+                TokenTree::Ident(ref tt) => write!(f, "{}", tt)?,
+                TokenTree::Punct(ref tt) => {
+                    write!(f, "{}", tt.as_char())?;
                     match tt.spacing() {
                         Spacing::Alone => {}
                         Spacing::Joint => joint = true,
                     }
                 }
                 TokenTree::Literal(ref tt) => write!(f, "{}", tt)?,
             }
         }
 
         Ok(())
     }
 }
 
+impl fmt::Debug for TokenStream {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.write_str("TokenStream ")?;
+        f.debug_list().entries(self.clone()).finish()
+    }
+}
+
 #[cfg(feature = "proc-macro")]
 impl From<::proc_macro::TokenStream> for TokenStream {
     fn from(inner: ::proc_macro::TokenStream) -> TokenStream {
         inner
             .to_string()
             .parse()
             .expect("compiler token stream parse failed")
     }
@@ -144,75 +149,79 @@ impl iter::FromIterator<TokenTree> for T
         for token in streams.into_iter() {
             v.push(token);
         }
 
         TokenStream { inner: v }
     }
 }
 
+impl Extend<TokenTree> for TokenStream {
+    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
+        self.inner.extend(streams);
+    }
+}
+
 pub type TokenTreeIter = vec::IntoIter<TokenTree>;
 
 impl IntoIterator for TokenStream {
     type Item = TokenTree;
     type IntoIter = TokenTreeIter;
 
     fn into_iter(self) -> TokenTreeIter {
         self.inner.into_iter()
     }
 }
 
-#[cfg(procmacro2_semver_exempt)]
 #[derive(Clone, PartialEq, Eq, Debug)]
 pub struct FileName(String);
 
-#[cfg(procmacro2_semver_exempt)]
+#[allow(dead_code)]
+pub fn file_name(s: String) -> FileName {
+    FileName(s)
+}
+
 impl fmt::Display for FileName {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         self.0.fmt(f)
     }
 }
 
-#[cfg(procmacro2_semver_exempt)]
 #[derive(Clone, PartialEq, Eq)]
 pub struct SourceFile {
     name: FileName,
 }
 
-#[cfg(procmacro2_semver_exempt)]
 impl SourceFile {
     /// Get the path to this source file as a string.
     pub fn path(&self) -> &FileName {
         &self.name
     }
 
     pub fn is_real(&self) -> bool {
         // XXX(nika): Support real files in the future?
         false
     }
 }
 
-#[cfg(procmacro2_semver_exempt)]
 impl AsRef<FileName> for SourceFile {
     fn as_ref(&self) -> &FileName {
         self.path()
     }
 }
 
-#[cfg(procmacro2_semver_exempt)]
 impl fmt::Debug for SourceFile {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         f.debug_struct("SourceFile")
             .field("path", &self.path())
             .field("is_real", &self.is_real())
             .finish()
     }
 }
 
-#[cfg(procmacro2_semver_exempt)]
 #[derive(Clone, Copy, Debug, PartialEq, Eq)]
 pub struct LineColumn {
     pub line: usize,
     pub column: usize,
 }
 
 #[cfg(procmacro2_semver_exempt)]
 thread_local! {
@@ -309,17 +318,17 @@ impl Codemap {
             if file.span_within(span) {
                 return file;
             }
         }
         panic!("Invalid span with no related FileInfo!");
     }
 }
 
-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+#[derive(Clone, Copy, PartialEq, Eq)]
 pub struct Span {
     #[cfg(procmacro2_semver_exempt)]
     lo: u32,
     #[cfg(procmacro2_semver_exempt)]
     hi: u32,
 }
 
 impl Span {
@@ -388,132 +397,139 @@ impl Span {
             Some(Span {
                 lo: cmp::min(self.lo, other.lo),
                 hi: cmp::max(self.hi, other.hi),
             })
         })
     }
 }
 
-#[derive(Copy, Clone)]
-pub struct Term {
-    intern: usize,
-    span: Span,
+impl fmt::Debug for Span {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        #[cfg(procmacro2_semver_exempt)]
+        return write!(f, "bytes({}..{})", self.lo, self.hi);
+
+        #[cfg(not(procmacro2_semver_exempt))]
+        write!(f, "Span")
+    }
 }
 
-thread_local!(static SYMBOLS: RefCell<Interner> = RefCell::new(Interner::new()));
+#[derive(Clone)]
+pub struct Ident {
+    sym: String,
+    span: Span,
+    raw: bool,
+}
 
-impl Term {
-    pub fn new(string: &str, span: Span) -> Term {
+impl Ident {
+    fn _new(string: &str, raw: bool, span: Span) -> Ident {
         validate_term(string);
 
-        Term {
-            intern: SYMBOLS.with(|s| s.borrow_mut().intern(string)),
+        Ident {
+            sym: string.to_owned(),
             span: span,
+            raw: raw,
         }
     }
 
-    pub fn as_str(&self) -> &str {
-        SYMBOLS.with(|interner| {
-            let interner = interner.borrow();
-            let s = interner.get(self.intern);
-            unsafe { &*(s as *const str) }
-        })
+    pub fn new(string: &str, span: Span) -> Ident {
+        Ident::_new(string, false, span)
+    }
+
+    pub fn new_raw(string: &str, span: Span) -> Ident {
+        Ident::_new(string, true, span)
     }
 
     pub fn span(&self) -> Span {
         self.span
     }
 
     pub fn set_span(&mut self, span: Span) {
         self.span = span;
     }
 }
 
+#[inline]
+fn is_ident_start(c: char) -> bool {
+    ('a' <= c && c <= 'z')
+        || ('A' <= c && c <= 'Z')
+        || c == '_'
+        || (c > '\x7f' && UnicodeXID::is_xid_start(c))
+}
+
+#[inline]
+fn is_ident_continue(c: char) -> bool {
+    ('a' <= c && c <= 'z')
+        || ('A' <= c && c <= 'Z')
+        || c == '_'
+        || ('0' <= c && c <= '9')
+        || (c > '\x7f' && UnicodeXID::is_xid_continue(c))
+}
+
 fn validate_term(string: &str) {
-    let validate = if string.starts_with('\'') {
-        &string[1..]
-    } else if string.starts_with("r#") {
-        &string[2..]
-    } else {
-        string
-    };
-
+    let validate = string;
     if validate.is_empty() {
-        panic!("Term is not allowed to be empty; use Option<Term>");
+        panic!("Ident is not allowed to be empty; use Option<Ident>");
     }
 
     if validate.bytes().all(|digit| digit >= b'0' && digit <= b'9') {
-        panic!("Term cannot be a number; use Literal instead");
+        panic!("Ident cannot be a number; use Literal instead");
     }
 
-    fn xid_ok(string: &str) -> bool {
+    fn ident_ok(string: &str) -> bool {
         let mut chars = string.chars();
         let first = chars.next().unwrap();
-        if !(UnicodeXID::is_xid_start(first) || first == '_') {
+        if !is_ident_start(first) {
             return false;
         }
         for ch in chars {
-            if !UnicodeXID::is_xid_continue(ch) {
+            if !is_ident_continue(ch) {
                 return false;
             }
         }
         true
     }
 
-    if !xid_ok(validate) {
-        panic!("{:?} is not a valid Term", string);
-    }
-}
-
-impl fmt::Debug for Term {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        f.debug_tuple("Term").field(&self.as_str()).finish()
+    if !ident_ok(validate) {
+        panic!("{:?} is not a valid Ident", string);
     }
 }
 
-struct Interner {
-    string_to_index: HashMap<MyRc, usize>,
-    index_to_string: Vec<Rc<String>>,
-}
-
-#[derive(Hash, Eq, PartialEq)]
-struct MyRc(Rc<String>);
-
-impl Borrow<str> for MyRc {
-    fn borrow(&self) -> &str {
-        &self.0
+impl fmt::Display for Ident {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        if self.raw {
+            "r#".fmt(f)?;
+        }
+        self.sym.fmt(f)
     }
 }
 
-impl Interner {
-    fn new() -> Interner {
-        Interner {
-            string_to_index: HashMap::new(),
-            index_to_string: Vec::new(),
-        }
+impl fmt::Debug for Ident {
+    // Ident(proc_macro), Ident(r#union)
+    #[cfg(not(procmacro2_semver_exempt))]
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        let mut debug = f.debug_tuple("Ident");
+        debug.field(&format_args!("{}", self));
+        debug.finish()
     }
 
-    fn intern(&mut self, s: &str) -> usize {
-        if let Some(&idx) = self.string_to_index.get(s) {
-            return idx;
-        }
-        let s = Rc::new(s.to_string());
-        self.index_to_string.push(s.clone());
-        self.string_to_index
-            .insert(MyRc(s), self.index_to_string.len() - 1);
-        self.index_to_string.len() - 1
-    }
-
-    fn get(&self, idx: usize) -> &str {
-        &self.index_to_string[idx]
+    // Ident {
+    //     sym: proc_macro,
+    //     span: bytes(128..138)
+    // }
+    #[cfg(procmacro2_semver_exempt)]
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        let mut debug = f.debug_struct("Ident");
+        debug.field("sym", &format_args!("{}", self));
+        debug.field("span", &self.span);
+        debug.finish()
     }
 }
 
-#[derive(Clone, Debug)]
+#[derive(Clone)]
 pub struct Literal {
     text: String,
     span: Span,
 }
 
 macro_rules! suffixed_numbers {
     ($($name:ident => $kind:ident,)*) => ($(
         pub fn $name(n: $kind) -> Literal {
@@ -579,17 +595,18 @@ impl Literal {
         let mut s = f.to_string();
         if !s.contains(".") {
             s.push_str(".0");
         }
         Literal::_new(s)
     }
 
     pub fn string(t: &str) -> Literal {
-        let mut s = t.chars()
+        let mut s = t
+            .chars()
             .flat_map(|c| c.escape_default())
             .collect::<String>();
         s.push('"');
         s.insert(0, '"');
         Literal::_new(s)
     }
 
     pub fn character(t: char) -> Literal {
@@ -624,160 +641,155 @@ impl Literal {
 }
 
 impl fmt::Display for Literal {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         self.text.fmt(f)
     }
 }
 
-fn token_stream(mut input: Cursor) -> PResult<::TokenStream> {
+impl fmt::Debug for Literal {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        let mut debug = fmt.debug_struct("Literal");
+        debug.field("lit", &format_args!("{}", self.text));
+        #[cfg(procmacro2_semver_exempt)]
+        debug.field("span", &self.span);
+        debug.finish()
+    }
+}
+
+fn token_stream(mut input: Cursor) -> PResult<TokenStream> {
     let mut trees = Vec::new();
     loop {
         let input_no_ws = skip_whitespace(input);
         if input_no_ws.rest.len() == 0 {
-            break
+            break;
         }
         if let Ok((a, tokens)) = doc_comment(input_no_ws) {
             input = a;
             trees.extend(tokens);
-            continue
+            continue;
         }
 
         let (a, tt) = match token_tree(input_no_ws) {
             Ok(p) => p,
             Err(_) => break,
         };
         trees.push(tt);
         input = a;
     }
-    Ok((input, ::TokenStream::_new(TokenStream { inner: trees })))
+    Ok((input, TokenStream { inner: trees }))
 }
 
 #[cfg(not(procmacro2_semver_exempt))]
 fn spanned<'a, T>(
     input: Cursor<'a>,
     f: fn(Cursor<'a>) -> PResult<'a, T>,
 ) -> PResult<'a, (T, ::Span)> {
     let (a, b) = f(skip_whitespace(input))?;
-    Ok((a, ((b, ::Span::_new(Span { })))))
+    Ok((a, ((b, ::Span::_new_stable(Span {})))))
 }
 
 #[cfg(procmacro2_semver_exempt)]
 fn spanned<'a, T>(
     input: Cursor<'a>,
     f: fn(Cursor<'a>) -> PResult<'a, T>,
 ) -> PResult<'a, (T, ::Span)> {
     let input = skip_whitespace(input);
     let lo = input.off;
     let (a, b) = f(input)?;
     let hi = a.off;
-    let span = ::Span::_new(Span { lo: lo, hi: hi });
+    let span = ::Span::_new_stable(Span { lo: lo, hi: hi });
     Ok((a, (b, span)))
 }
 
 fn token_tree(input: Cursor) -> PResult<TokenTree> {
     let (rest, (mut tt, span)) = spanned(input, token_kind)?;
     tt.set_span(span);
     Ok((rest, tt))
 }
 
 named!(token_kind -> TokenTree, alt!(
     map!(group, TokenTree::Group)
     |
-    map!(literal, TokenTree::Literal) // must be before symbol
+    map!(literal, |l| TokenTree::Literal(::Literal::_new_stable(l))) // must be before symbol
     |
-    symbol
+    map!(op, TokenTree::Punct)
     |
-    map!(op, TokenTree::Op)
+    symbol_leading_ws
 ));
 
 named!(group -> Group, alt!(
     delimited!(
         punct!("("),
         token_stream,
         punct!(")")
-    ) => { |ts| Group::new(Delimiter::Parenthesis, ts) }
+    ) => { |ts| Group::new(Delimiter::Parenthesis, ::TokenStream::_new_stable(ts)) }
     |
     delimited!(
         punct!("["),
         token_stream,
         punct!("]")
-    ) => { |ts| Group::new(Delimiter::Bracket, ts) }
+    ) => { |ts| Group::new(Delimiter::Bracket, ::TokenStream::_new_stable(ts)) }
     |
     delimited!(
         punct!("{"),
         token_stream,
         punct!("}")
-    ) => { |ts| Group::new(Delimiter::Brace, ts) }
+    ) => { |ts| Group::new(Delimiter::Brace, ::TokenStream::_new_stable(ts)) }
 ));
 
-fn symbol(mut input: Cursor) -> PResult<TokenTree> {
-    input = skip_whitespace(input);
+fn symbol_leading_ws(input: Cursor) -> PResult<TokenTree> {
+    symbol(skip_whitespace(input))
+}
 
+fn symbol(input: Cursor) -> PResult<TokenTree> {
     let mut chars = input.char_indices();
 
-    let lifetime = input.starts_with("'");
-    if lifetime {
-        chars.next();
-    }
-
-    let raw = !lifetime && input.starts_with("r#");
+    let raw = input.starts_with("r#");
     if raw {
         chars.next();
         chars.next();
     }
 
     match chars.next() {
-        Some((_, ch)) if UnicodeXID::is_xid_start(ch) || ch == '_' => {}
+        Some((_, ch)) if is_ident_start(ch) => {}
         _ => return Err(LexError),
     }
 
     let mut end = input.len();
     for (i, ch) in chars {
-        if !UnicodeXID::is_xid_continue(ch) {
+        if !is_ident_continue(ch) {
             end = i;
             break;
         }
     }
 
     let a = &input.rest[..end];
-    if a == "r#_" || lifetime && a != "'static" && KEYWORDS.contains(&&a[1..]) {
+    if a == "r#_" {
         Err(LexError)
-    } else if a == "_" {
-        Ok((input.advance(end), Op::new('_', Spacing::Alone).into()))
     } else {
-        Ok((
-            input.advance(end),
-            ::Term::new(a, ::Span::call_site()).into(),
-        ))
+        let ident = if raw {
+            ::Ident::_new_raw(&a[2..], ::Span::call_site())
+        } else {
+            ::Ident::new(a, ::Span::call_site())
+        };
+        Ok((input.advance(end), ident.into()))
     }
 }
 
-// From https://github.com/rust-lang/rust/blob/master/src/libsyntax_pos/symbol.rs
-static KEYWORDS: &'static [&'static str] = &[
-    "abstract", "alignof", "as", "become", "box", "break", "const", "continue", "crate", "do",
-    "else", "enum", "extern", "false", "final", "fn", "for", "if", "impl", "in", "let", "loop",
-    "macro", "match", "mod", "move", "mut", "offsetof", "override", "priv", "proc", "pub", "pure",
-    "ref", "return", "self", "Self", "sizeof", "static", "struct", "super", "trait", "true",
-    "type", "typeof", "unsafe", "unsized", "use", "virtual", "where", "while", "yield",
-];
-
-fn literal(input: Cursor) -> PResult<::Literal> {
+fn literal(input: Cursor) -> PResult<Literal> {
     let input_no_ws = skip_whitespace(input);
 
     match literal_nocapture(input_no_ws) {
         Ok((a, ())) => {
             let start = input.len() - input_no_ws.len();
             let len = input_no_ws.len() - a.len();
             let end = start + len;
-            Ok((
-                a,
-                ::Literal::_new(Literal::_new(input.rest[start..end].to_string())),
-            ))
+            Ok((a, Literal::_new(input.rest[start..end].to_string())))
         }
         Err(LexError) => Err(LexError),
     }
 }
 
 named!(literal_nocapture -> (), alt!(
     string
     |
@@ -1126,17 +1138,17 @@ fn float_digits(input: Cursor) -> PResul
     }
 
     Ok((input.advance(len), ()))
 }
 
 fn int(input: Cursor) -> PResult<()> {
     let (rest, ()) = digits(input)?;
     for suffix in &[
-        "isize", "i8", "i16", "i32", "i64", "i128", "usize", "u8", "u16", "u32", "u64", "u128"
+        "isize", "i8", "i16", "i32", "i64", "i128", "usize", "u8", "u16", "u32", "u64", "u128",
     ] {
         if rest.starts_with(suffix) {
             return word_break(rest.advance(suffix.len()));
         }
     }
     word_break(rest)
 }
 
@@ -1178,56 +1190,65 @@ fn digits(mut input: Cursor) -> PResult<
     }
     if empty {
         Err(LexError)
     } else {
         Ok((input.advance(len), ()))
     }
 }
 
-fn op(input: Cursor) -> PResult<Op> {
+fn op(input: Cursor) -> PResult<Punct> {
     let input = skip_whitespace(input);
     match op_char(input) {
+        Ok((rest, '\'')) => {
+            symbol(rest)?;
+            Ok((rest, Punct::new('\'', Spacing::Joint)))
+        }
         Ok((rest, ch)) => {
             let kind = match op_char(rest) {
                 Ok(_) => Spacing::Joint,
                 Err(LexError) => Spacing::Alone,
             };
-            Ok((rest, Op::new(ch, kind)))
+            Ok((rest, Punct::new(ch, kind)))
         }
         Err(LexError) => Err(LexError),
     }
 }
 
 fn op_char(input: Cursor) -> PResult<char> {
+    if input.starts_with("//") || input.starts_with("/*") {
+        // Do not accept `/` of a comment as an op.
+        return Err(LexError);
+    }
+
     let mut chars = input.chars();
     let first = match chars.next() {
         Some(ch) => ch,
         None => {
             return Err(LexError);
         }
     };
-    let recognized = "~!@#$%^&*-=+|;:,<.>/?";
+    let recognized = "~!@#$%^&*-=+|;:,<.>/?'";
     if recognized.contains(first) {
         Ok((input.advance(first.len_utf8()), first))
     } else {
         Err(LexError)
     }
 }
 
 fn doc_comment(input: Cursor) -> PResult<Vec<TokenTree>> {
     let mut trees = Vec::new();
     let (rest, ((comment, inner), span)) = spanned(input, doc_comment_contents)?;
-    trees.push(TokenTree::Op(Op::new('#', Spacing::Alone)));
+    trees.push(TokenTree::Punct(Punct::new('#', Spacing::Alone)));
     if inner {
-        trees.push(Op::new('!', Spacing::Alone).into());
+        trees.push(Punct::new('!', Spacing::Alone).into());
     }
     let mut stream = vec![
-        TokenTree::Term(::Term::new("doc", span)),
-        TokenTree::Op(Op::new('=', Spacing::Alone)),
+        TokenTree::Ident(::Ident::new("doc", span)),
+        TokenTree::Punct(Punct::new('=', Spacing::Alone)),
         TokenTree::Literal(::Literal::string(comment)),
     ];
     for tt in stream.iter_mut() {
         tt.set_span(span);
     }
     trees.push(Group::new(Delimiter::Bracket, stream.into_iter().collect()).into());
     for tt in trees.iter_mut() {
         tt.set_span(span);
--- a/third_party/rust/proc-macro2/src/strnom.rs
+++ b/third_party/rust/proc-macro2/src/strnom.rs
@@ -1,15 +1,15 @@
 //! Adapted from [`nom`](https://github.com/Geal/nom).
 
 use std::str::{Bytes, CharIndices, Chars};
 
 use unicode_xid::UnicodeXID;
 
-use imp::LexError;
+use stable::LexError;
 
 #[derive(Copy, Clone, Eq, PartialEq)]
 pub struct Cursor<'a> {
     pub rest: &'a str,
     #[cfg(procmacro2_semver_exempt)]
     pub off: u32,
 }
 
@@ -68,28 +68,30 @@ pub fn whitespace(input: Cursor) -> PRes
         return Err(LexError);
     }
 
     let bytes = input.as_bytes();
     let mut i = 0;
     while i < bytes.len() {
         let s = input.advance(i);
         if bytes[i] == b'/' {
-            if s.starts_with("//") && (!s.starts_with("///") || s.starts_with("////"))
+            if s.starts_with("//")
+                && (!s.starts_with("///") || s.starts_with("////"))
                 && !s.starts_with("//!")
             {
                 if let Some(len) = s.find('\n') {
                     i += len + 1;
                     continue;
                 }
                 break;
             } else if s.starts_with("/**/") {
                 i += 4;
                 continue;
-            } else if s.starts_with("/*") && (!s.starts_with("/**") || s.starts_with("/***"))
+            } else if s.starts_with("/*")
+                && (!s.starts_with("/**") || s.starts_with("/***"))
                 && !s.starts_with("/*!")
             {
                 let (_, com) = block_comment(s)?;
                 i += com.len();
                 continue;
             }
         }
         match bytes[i] {
--- a/third_party/rust/proc-macro2/src/unstable.rs
+++ b/third_party/rust/proc-macro2/src/unstable.rs
@@ -1,337 +1,559 @@
 #![cfg_attr(not(procmacro2_semver_exempt), allow(dead_code))]
 
 use std::fmt;
 use std::iter;
+use std::panic;
 use std::str::FromStr;
 
 use proc_macro;
+use stable;
 
-use {Delimiter, Group, Op, Spacing, TokenTree};
+use {Delimiter, Group, Punct, Spacing, TokenTree};
 
 #[derive(Clone)]
-pub struct TokenStream(proc_macro::TokenStream);
+pub enum TokenStream {
+    Nightly(proc_macro::TokenStream),
+    Stable(stable::TokenStream),
+}
+
+pub enum LexError {
+    Nightly(proc_macro::LexError),
+    Stable(stable::LexError),
+}
+
+fn nightly_works() -> bool {
+    use std::sync::atomic::*;
+    static WORKS: AtomicUsize = ATOMIC_USIZE_INIT;
 
-pub struct LexError(proc_macro::LexError);
+    match WORKS.load(Ordering::SeqCst) {
+        1 => return false,
+        2 => return true,
+        _ => {}
+    }
+    let works = panic::catch_unwind(|| proc_macro::Span::call_site()).is_ok();
+    WORKS.store(works as usize + 1, Ordering::SeqCst);
+    works
+}
+
+fn mismatch() -> ! {
+    panic!("stable/nightly mismatch")
+}
 
 impl TokenStream {
-    pub fn empty() -> TokenStream {
-        TokenStream(proc_macro::TokenStream::empty())
+    pub fn new() -> TokenStream {
+        if nightly_works() {
+            TokenStream::Nightly(proc_macro::TokenStream::new())
+        } else {
+            TokenStream::Stable(stable::TokenStream::new())
+        }
     }
 
     pub fn is_empty(&self) -> bool {
-        self.0.is_empty()
+        match self {
+            TokenStream::Nightly(tts) => tts.is_empty(),
+            TokenStream::Stable(tts) => tts.is_empty(),
+        }
+    }
+
+    fn unwrap_nightly(self) -> proc_macro::TokenStream {
+        match self {
+            TokenStream::Nightly(s) => s,
+            TokenStream::Stable(_) => mismatch(),
+        }
     }
 }
 
 impl FromStr for TokenStream {
     type Err = LexError;
 
     fn from_str(src: &str) -> Result<TokenStream, LexError> {
-        Ok(TokenStream(src.parse().map_err(LexError)?))
+        if nightly_works() {
+            Ok(TokenStream::Nightly(src.parse()?))
+        } else {
+            Ok(TokenStream::Stable(src.parse()?))
+        }
     }
 }
 
 impl fmt::Display for TokenStream {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.0.fmt(f)
+        match self {
+            TokenStream::Nightly(tts) => tts.fmt(f),
+            TokenStream::Stable(tts) => tts.fmt(f),
+        }
     }
 }
 
 impl From<proc_macro::TokenStream> for TokenStream {
     fn from(inner: proc_macro::TokenStream) -> TokenStream {
-        TokenStream(inner)
+        TokenStream::Nightly(inner)
     }
 }
 
 impl From<TokenStream> for proc_macro::TokenStream {
     fn from(inner: TokenStream) -> proc_macro::TokenStream {
-        inner.0
+        match inner {
+            TokenStream::Nightly(inner) => inner,
+            TokenStream::Stable(inner) => inner.to_string().parse().unwrap(),
+        }
+    }
+}
+
+impl From<stable::TokenStream> for TokenStream {
+    fn from(inner: stable::TokenStream) -> TokenStream {
+        TokenStream::Stable(inner)
     }
 }
 
 impl From<TokenTree> for TokenStream {
     fn from(token: TokenTree) -> TokenStream {
+        if !nightly_works() {
+            return TokenStream::Stable(token.into());
+        }
         let tt: proc_macro::TokenTree = match token {
             TokenTree::Group(tt) => {
                 let delim = match tt.delimiter() {
                     Delimiter::Parenthesis => proc_macro::Delimiter::Parenthesis,
                     Delimiter::Bracket => proc_macro::Delimiter::Bracket,
                     Delimiter::Brace => proc_macro::Delimiter::Brace,
                     Delimiter::None => proc_macro::Delimiter::None,
                 };
                 let span = tt.span();
-                let mut group = proc_macro::Group::new(delim, tt.stream.inner.0);
-                group.set_span(span.inner.0);
+                let mut group = proc_macro::Group::new(delim, tt.stream.inner.unwrap_nightly());
+                group.set_span(span.inner.unwrap_nightly());
                 group.into()
             }
-            TokenTree::Op(tt) => {
+            TokenTree::Punct(tt) => {
                 let spacing = match tt.spacing() {
                     Spacing::Joint => proc_macro::Spacing::Joint,
                     Spacing::Alone => proc_macro::Spacing::Alone,
                 };
-                let mut op = proc_macro::Op::new(tt.op(), spacing);
-                op.set_span(tt.span().inner.0);
+                let mut op = proc_macro::Punct::new(tt.as_char(), spacing);
+                op.set_span(tt.span().inner.unwrap_nightly());
                 op.into()
             }
-            TokenTree::Term(tt) => tt.inner.term.into(),
-            TokenTree::Literal(tt) => tt.inner.lit.into(),
+            TokenTree::Ident(tt) => tt.inner.unwrap_nightly().into(),
+            TokenTree::Literal(tt) => tt.inner.unwrap_nightly().into(),
         };
-        TokenStream(tt.into())
+        TokenStream::Nightly(tt.into())
     }
 }
 
 impl iter::FromIterator<TokenTree> for TokenStream {
-    fn from_iter<I: IntoIterator<Item = TokenTree>>(streams: I) -> Self {
-        let streams = streams.into_iter().map(TokenStream::from)
-            .flat_map(|t| t.0);
-        TokenStream(streams.collect::<proc_macro::TokenStream>())
+    fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
+        if nightly_works() {
+            let trees = trees
+                .into_iter()
+                .map(TokenStream::from)
+                .flat_map(|t| match t {
+                    TokenStream::Nightly(s) => s,
+                    TokenStream::Stable(_) => mismatch(),
+                });
+            TokenStream::Nightly(trees.collect())
+        } else {
+            TokenStream::Stable(trees.into_iter().collect())
+        }
+    }
+}
+
+impl Extend<TokenTree> for TokenStream {
+    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, streams: I) {
+        match self {
+            TokenStream::Nightly(tts) => {
+                *tts = tts
+                    .clone()
+                    .into_iter()
+                    .chain(
+                        streams
+                            .into_iter()
+                            .map(TokenStream::from)
+                            .flat_map(|t| match t {
+                                TokenStream::Nightly(tts) => tts.into_iter(),
+                                _ => panic!(),
+                            }),
+                    )
+                    .collect();
+            }
+            TokenStream::Stable(tts) => tts.extend(streams),
+        }
     }
 }
 
 impl fmt::Debug for TokenStream {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.0.fmt(f)
+        match self {
+            TokenStream::Nightly(tts) => tts.fmt(f),
+            TokenStream::Stable(tts) => tts.fmt(f),
+        }
+    }
+}
+
+impl From<proc_macro::LexError> for LexError {
+    fn from(e: proc_macro::LexError) -> LexError {
+        LexError::Nightly(e)
+    }
+}
+
+impl From<stable::LexError> for LexError {
+    fn from(e: stable::LexError) -> LexError {
+        LexError::Stable(e)
     }
 }
 
 impl fmt::Debug for LexError {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.0.fmt(f)
+        match self {
+            LexError::Nightly(e) => e.fmt(f),
+            LexError::Stable(e) => e.fmt(f),
+        }
     }
 }
 
-pub struct TokenTreeIter(proc_macro::token_stream::IntoIter);
+pub enum TokenTreeIter {
+    Nightly(proc_macro::token_stream::IntoIter),
+    Stable(stable::TokenTreeIter),
+}
 
 impl IntoIterator for TokenStream {
     type Item = TokenTree;
     type IntoIter = TokenTreeIter;
 
     fn into_iter(self) -> TokenTreeIter {
-        TokenTreeIter(self.0.into_iter())
+        match self {
+            TokenStream::Nightly(tts) => TokenTreeIter::Nightly(tts.into_iter()),
+            TokenStream::Stable(tts) => TokenTreeIter::Stable(tts.into_iter()),
+        }
     }
 }
 
 impl Iterator for TokenTreeIter {
     type Item = TokenTree;
 
     fn next(&mut self) -> Option<TokenTree> {
-        let token = self.0.next()?;
+        let token = match self {
+            TokenTreeIter::Nightly(iter) => iter.next()?,
+            TokenTreeIter::Stable(iter) => return iter.next(),
+        };
         Some(match token {
             proc_macro::TokenTree::Group(tt) => {
                 let delim = match tt.delimiter() {
                     proc_macro::Delimiter::Parenthesis => Delimiter::Parenthesis,
                     proc_macro::Delimiter::Bracket => Delimiter::Bracket,
                     proc_macro::Delimiter::Brace => Delimiter::Brace,
                     proc_macro::Delimiter::None => Delimiter::None,
                 };
-                let stream = ::TokenStream::_new(TokenStream(tt.stream()));
+                let stream = ::TokenStream::_new(TokenStream::Nightly(tt.stream()));
                 let mut g = Group::new(delim, stream);
-                g.set_span(::Span::_new(Span(tt.span())));
+                g.set_span(::Span::_new(Span::Nightly(tt.span())));
                 g.into()
             }
-            proc_macro::TokenTree::Op(tt) => {
+            proc_macro::TokenTree::Punct(tt) => {
                 let spacing = match tt.spacing() {
                     proc_macro::Spacing::Joint => Spacing::Joint,
                     proc_macro::Spacing::Alone => Spacing::Alone,
                 };
-                let mut o = Op::new(tt.op(), spacing);
-                o.set_span(::Span::_new(Span(tt.span())));
+                let mut o = Punct::new(tt.as_char(), spacing);
+                o.set_span(::Span::_new(Span::Nightly(tt.span())));
                 o.into()
             }
-            proc_macro::TokenTree::Term(s) => {
-                ::Term::_new(Term {
-                    term: s,
-                }).into()
-            }
-            proc_macro::TokenTree::Literal(l) => {
-                ::Literal::_new(Literal {
-                    lit: l,
-                }).into()
-            }
+            proc_macro::TokenTree::Ident(s) => ::Ident::_new(Ident::Nightly(s)).into(),
+            proc_macro::TokenTree::Literal(l) => ::Literal::_new(Literal::Nightly(l)).into(),
         })
     }
 
     fn size_hint(&self) -> (usize, Option<usize>) {
-        self.0.size_hint()
+        match self {
+            TokenTreeIter::Nightly(tts) => tts.size_hint(),
+            TokenTreeIter::Stable(tts) => tts.size_hint(),
+        }
     }
 }
 
 impl fmt::Debug for TokenTreeIter {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         f.debug_struct("TokenTreeIter").finish()
     }
 }
 
-#[derive(Clone, PartialEq, Eq)]
-pub struct FileName(String);
-
-impl fmt::Display for FileName {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.0.fmt(f)
-    }
-}
+pub use stable::FileName;
 
 // NOTE: We have to generate our own filename object here because we can't wrap
 // the one provided by proc_macro.
 #[derive(Clone, PartialEq, Eq)]
-pub struct SourceFile(proc_macro::SourceFile, FileName);
+pub enum SourceFile {
+    Nightly(proc_macro::SourceFile, FileName),
+    Stable(stable::SourceFile),
+}
 
 impl SourceFile {
-    fn new(sf: proc_macro::SourceFile) -> Self {
-        let filename = FileName(sf.path().to_string());
-        SourceFile(sf, filename)
+    fn nightly(sf: proc_macro::SourceFile) -> Self {
+        let filename = stable::file_name(sf.path().to_string());
+        SourceFile::Nightly(sf, filename)
     }
 
     /// Get the path to this source file as a string.
     pub fn path(&self) -> &FileName {
-        &self.1
+        match self {
+            SourceFile::Nightly(_, f) => f,
+            SourceFile::Stable(a) => a.path(),
+        }
     }
 
     pub fn is_real(&self) -> bool {
-        self.0.is_real()
+        match self {
+            SourceFile::Nightly(a, _) => a.is_real(),
+            SourceFile::Stable(a) => a.is_real(),
+        }
     }
 }
 
 impl AsRef<FileName> for SourceFile {
     fn as_ref(&self) -> &FileName {
         self.path()
     }
 }
 
 impl fmt::Debug for SourceFile {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.0.fmt(f)
+        match self {
+            SourceFile::Nightly(a, _) => a.fmt(f),
+            SourceFile::Stable(a) => a.fmt(f),
+        }
     }
 }
 
 pub struct LineColumn {
     pub line: usize,
     pub column: usize,
 }
 
 #[derive(Copy, Clone)]
-pub struct Span(proc_macro::Span);
-
-impl From<proc_macro::Span> for ::Span {
-    fn from(proc_span: proc_macro::Span) -> ::Span {
-        ::Span::_new(Span(proc_span))
-    }
+pub enum Span {
+    Nightly(proc_macro::Span),
+    Stable(stable::Span),
 }
 
 impl Span {
     pub fn call_site() -> Span {
-        Span(proc_macro::Span::call_site())
+        if nightly_works() {
+            Span::Nightly(proc_macro::Span::call_site())
+        } else {
+            Span::Stable(stable::Span::call_site())
+        }
     }
 
     pub fn def_site() -> Span {
-        Span(proc_macro::Span::def_site())
+        if nightly_works() {
+            Span::Nightly(proc_macro::Span::def_site())
+        } else {
+            Span::Stable(stable::Span::def_site())
+        }
     }
 
     pub fn resolved_at(&self, other: Span) -> Span {
-        Span(self.0.resolved_at(other.0))
+        match (self, other) {
+            (Span::Nightly(a), Span::Nightly(b)) => Span::Nightly(a.resolved_at(b)),
+            (Span::Stable(a), Span::Stable(b)) => Span::Stable(a.resolved_at(b)),
+            _ => mismatch(),
+        }
     }
 
     pub fn located_at(&self, other: Span) -> Span {
-        Span(self.0.located_at(other.0))
+        match (self, other) {
+            (Span::Nightly(a), Span::Nightly(b)) => Span::Nightly(a.located_at(b)),
+            (Span::Stable(a), Span::Stable(b)) => Span::Stable(a.located_at(b)),
+            _ => mismatch(),
+        }
     }
 
     pub fn unstable(self) -> proc_macro::Span {
-        self.0
+        match self {
+            Span::Nightly(s) => s,
+            Span::Stable(_) => mismatch(),
+        }
     }
 
+    #[cfg(procmacro2_semver_exempt)]
     pub fn source_file(&self) -> SourceFile {
-        SourceFile::new(self.0.source_file())
+        match self {
+            Span::Nightly(s) => SourceFile::nightly(s.source_file()),
+            Span::Stable(s) => SourceFile::Stable(s.source_file()),
+        }
     }
 
+    #[cfg(procmacro2_semver_exempt)]
     pub fn start(&self) -> LineColumn {
-        let proc_macro::LineColumn { line, column } = self.0.start();
-        LineColumn { line, column }
+        match self {
+            Span::Nightly(s) => {
+                let proc_macro::LineColumn { line, column } = s.start();
+                LineColumn { line, column }
+            }
+            Span::Stable(s) => {
+                let stable::LineColumn { line, column } = s.start();
+                LineColumn { line, column }
+            }
+        }
     }
 
+    #[cfg(procmacro2_semver_exempt)]
     pub fn end(&self) -> LineColumn {
-        let proc_macro::LineColumn { line, column } = self.0.end();
-        LineColumn { line, column }
+        match self {
+            Span::Nightly(s) => {
+                let proc_macro::LineColumn { line, column } = s.end();
+                LineColumn { line, column }
+            }
+            Span::Stable(s) => {
+                let stable::LineColumn { line, column } = s.end();
+                LineColumn { line, column }
+            }
+        }
     }
 
+    #[cfg(procmacro2_semver_exempt)]
     pub fn join(&self, other: Span) -> Option<Span> {
-        self.0.join(other.0).map(Span)
+        let ret = match (self, other) {
+            (Span::Nightly(a), Span::Nightly(b)) => Span::Nightly(a.join(b)?),
+            (Span::Stable(a), Span::Stable(b)) => Span::Stable(a.join(b)?),
+            _ => return None,
+        };
+        Some(ret)
     }
 
     pub fn eq(&self, other: &Span) -> bool {
-        self.0.eq(&other.0)
+        match (self, other) {
+            (Span::Nightly(a), Span::Nightly(b)) => a.eq(b),
+            (Span::Stable(a), Span::Stable(b)) => a.eq(b),
+            _ => false,
+        }
+    }
+
+    fn unwrap_nightly(self) -> proc_macro::Span {
+        match self {
+            Span::Nightly(s) => s,
+            Span::Stable(_) => mismatch(),
+        }
+    }
+}
+
+impl From<proc_macro::Span> for ::Span {
+    fn from(proc_span: proc_macro::Span) -> ::Span {
+        ::Span::_new(Span::Nightly(proc_span))
+    }
+}
+
+impl From<stable::Span> for Span {
+    fn from(inner: stable::Span) -> Span {
+        Span::Stable(inner)
     }
 }
 
 impl fmt::Debug for Span {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.0.fmt(f)
-    }
-}
-
-#[derive(Copy, Clone)]
-pub struct Term {
-    term: proc_macro::Term,
-}
-
-impl Term {
-    pub fn new(string: &str, span: Span) -> Term {
-        Term {
-            term: proc_macro::Term::new(string, span.0),
+        match self {
+            Span::Nightly(s) => s.fmt(f),
+            Span::Stable(s) => s.fmt(f),
         }
     }
-
-    pub fn as_str(&self) -> &str {
-        self.term.as_str()
-    }
-
-    pub fn span(&self) -> Span {
-        Span(self.term.span())
-    }
-
-    pub fn set_span(&mut self, span: Span) {
-        self.term.set_span(span.0);
-    }
-}
-
-impl fmt::Debug for Term {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.term.fmt(f)
-    }
 }
 
 #[derive(Clone)]
-pub struct Literal {
-    lit: proc_macro::Literal,
+pub enum Ident {
+    Nightly(proc_macro::Ident),
+    Stable(stable::Ident),
+}
+
+impl Ident {
+    pub fn new(string: &str, span: Span) -> Ident {
+        match span {
+            Span::Nightly(s) => Ident::Nightly(proc_macro::Ident::new(string, s)),
+            Span::Stable(s) => Ident::Stable(stable::Ident::new(string, s)),
+        }
+    }
+
+    pub fn new_raw(string: &str, span: Span) -> Ident {
+        match span {
+            Span::Nightly(s) => Ident::Nightly(proc_macro::Ident::new_raw(string, s)),
+            Span::Stable(s) => Ident::Stable(stable::Ident::new_raw(string, s)),
+        }
+    }
+
+    pub fn span(&self) -> Span {
+        match self {
+            Ident::Nightly(t) => Span::Nightly(t.span()),
+            Ident::Stable(t) => Span::Stable(t.span()),
+        }
+    }
+
+    pub fn set_span(&mut self, span: Span) {
+        match (self, span) {
+            (Ident::Nightly(t), Span::Nightly(s)) => t.set_span(s),
+            (Ident::Stable(t), Span::Stable(s)) => t.set_span(s),
+            _ => mismatch(),
+        }
+    }
+
+    fn unwrap_nightly(self) -> proc_macro::Ident {
+        match self {
+            Ident::Nightly(s) => s,
+            Ident::Stable(_) => mismatch(),
+        }
+    }
+}
+
+impl fmt::Display for Ident {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match self {
+            Ident::Nightly(t) => t.fmt(f),
+            Ident::Stable(t) => t.fmt(f),
+        }
+    }
+}
+
+impl fmt::Debug for Ident {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match self {
+            Ident::Nightly(t) => t.fmt(f),
+            Ident::Stable(t) => t.fmt(f),
+        }
+    }
+}
+
+#[derive(Clone)]
+pub enum Literal {
+    Nightly(proc_macro::Literal),
+    Stable(stable::Literal),
 }
 
 macro_rules! suffixed_numbers {
     ($($name:ident => $kind:ident,)*) => ($(
         pub fn $name(n: $kind) -> Literal {
-            Literal::_new(proc_macro::Literal::$name(n))
+            if nightly_works() {
+                Literal::Nightly(proc_macro::Literal::$name(n))
+            } else {
+                Literal::Stable(stable::Literal::$name(n))
+            }
         }
     )*)
 }
 
 macro_rules! unsuffixed_integers {
     ($($name:ident => $kind:ident,)*) => ($(
         pub fn $name(n: $kind) -> Literal {
-            Literal::_new(proc_macro::Literal::$name(n))
+            if nightly_works() {
+                Literal::Nightly(proc_macro::Literal::$name(n))
+            } else {
+                Literal::Stable(stable::Literal::$name(n))
+            }
         }
     )*)
 }
 
 impl Literal {
-    fn _new(lit: proc_macro::Literal) -> Literal {
-        Literal {
-            lit,
-        }
-    }
-
     suffixed_numbers! {
         u8_suffixed => u8,
         u16_suffixed => u16,
         u32_suffixed => u32,
         u64_suffixed => u64,
         usize_suffixed => usize,
         i8_suffixed => i8,
         i16_suffixed => i16,
@@ -352,48 +574,93 @@ impl Literal {
         i8_unsuffixed => i8,
         i16_unsuffixed => i16,
         i32_unsuffixed => i32,
         i64_unsuffixed => i64,
         isize_unsuffixed => isize,
     }
 
     pub fn f32_unsuffixed(f: f32) -> Literal {
-        Literal::_new(proc_macro::Literal::f32_unsuffixed(f))
+        if nightly_works() {
+            Literal::Nightly(proc_macro::Literal::f32_unsuffixed(f))
+        } else {
+            Literal::Stable(stable::Literal::f32_unsuffixed(f))
+        }
     }
 
     pub fn f64_unsuffixed(f: f64) -> Literal {
-        Literal::_new(proc_macro::Literal::f64_unsuffixed(f))
+        if nightly_works() {
+            Literal::Nightly(proc_macro::Literal::f64_unsuffixed(f))
+        } else {
+            Literal::Stable(stable::Literal::f64_unsuffixed(f))
+        }
     }
 
-
     pub fn string(t: &str) -> Literal {
-        Literal::_new(proc_macro::Literal::string(t))
+        if nightly_works() {
+            Literal::Nightly(proc_macro::Literal::string(t))
+        } else {
+            Literal::Stable(stable::Literal::string(t))
+        }
     }
 
     pub fn character(t: char) -> Literal {
-        Literal::_new(proc_macro::Literal::character(t))
+        if nightly_works() {
+            Literal::Nightly(proc_macro::Literal::character(t))
+        } else {
+            Literal::Stable(stable::Literal::character(t))
+        }
     }
 
     pub fn byte_string(bytes: &[u8]) -> Literal {
-        Literal::_new(proc_macro::Literal::byte_string(bytes))
+        if nightly_works() {
+            Literal::Nightly(proc_macro::Literal::byte_string(bytes))
+        } else {
+            Literal::Stable(stable::Literal::byte_string(bytes))
+        }
     }
 
     pub fn span(&self) -> Span {
-        Span(self.lit.span())
+        match self {
+            Literal::Nightly(lit) => Span::Nightly(lit.span()),
+            Literal::Stable(lit) => Span::Stable(lit.span()),
+        }
     }
 
     pub fn set_span(&mut self, span: Span) {
-        self.lit.set_span(span.0);
+        match (self, span) {
+            (Literal::Nightly(lit), Span::Nightly(s)) => lit.set_span(s),
+            (Literal::Stable(lit), Span::Stable(s)) => lit.set_span(s),
+            _ => mismatch(),
+        }
+    }
+
+    fn unwrap_nightly(self) -> proc_macro::Literal {
+        match self {
+            Literal::Nightly(s) => s,
+            Literal::Stable(_) => mismatch(),
+        }
+    }
+}
+
+impl From<stable::Literal> for Literal {
+    fn from(s: stable::Literal) -> Literal {
+        Literal::Stable(s)
     }
 }
 
 impl fmt::Display for Literal {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.lit.fmt(f)
+        match self {
+            Literal::Nightly(t) => t.fmt(f),
+            Literal::Stable(t) => t.fmt(f),
+        }
     }
 }
 
 impl fmt::Debug for Literal {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.lit.fmt(f)
+        match self {
+            Literal::Nightly(t) => t.fmt(f),
+            Literal::Stable(t) => t.fmt(f),
+        }
     }
 }
--- a/third_party/rust/proc-macro2/tests/test.rs
+++ b/third_party/rust/proc-macro2/tests/test.rs
@@ -1,87 +1,87 @@
 extern crate proc_macro2;
 
 use std::str::{self, FromStr};
 
-use proc_macro2::{Literal, Span, Term, TokenStream, TokenTree};
+use proc_macro2::{Ident, Literal, Spacing, Span, TokenStream, TokenTree};
 
 #[test]
 fn terms() {
-    assert_eq!(Term::new("String", Span::call_site()).as_str(), "String");
-    assert_eq!(Term::new("fn", Span::call_site()).as_str(), "fn");
-    assert_eq!(Term::new("_", Span::call_site()).as_str(), "_");
-}
-
-#[test]
-fn raw_terms() {
-    assert_eq!(Term::new("r#String", Span::call_site()).as_str(), "r#String");
-    assert_eq!(Term::new("r#fn", Span::call_site()).as_str(), "r#fn");
-    assert_eq!(Term::new("r#_", Span::call_site()).as_str(), "r#_");
+    assert_eq!(
+        Ident::new("String", Span::call_site()).to_string(),
+        "String"
+    );
+    assert_eq!(Ident::new("fn", Span::call_site()).to_string(), "fn");
+    assert_eq!(Ident::new("_", Span::call_site()).to_string(), "_");
 }
 
 #[test]
-fn lifetimes() {
-    assert_eq!(Term::new("'a", Span::call_site()).as_str(), "'a");
-    assert_eq!(Term::new("'static", Span::call_site()).as_str(), "'static");
-    assert_eq!(Term::new("'_", Span::call_site()).as_str(), "'_");
+#[cfg(procmacro2_semver_exempt)]
+fn raw_terms() {
+    assert_eq!(
+        Ident::new_raw("String", Span::call_site()).to_string(),
+        "r#String"
+    );
+    assert_eq!(Ident::new_raw("fn", Span::call_site()).to_string(), "r#fn");
+    assert_eq!(Ident::new_raw("_", Span::call_site()).to_string(), "r#_");
 }
 
 #[test]
-#[should_panic(expected = "Term is not allowed to be empty; use Option<Term>")]
+#[should_panic(expected = "Ident is not allowed to be empty; use Option<Ident>")]
 fn term_empty() {
-    Term::new("", Span::call_site());
+    Ident::new("", Span::call_site());
 }
 
 #[test]
-#[should_panic(expected = "Term cannot be a number; use Literal instead")]
+#[should_panic(expected = "Ident cannot be a number; use Literal instead")]
 fn term_number() {
-    Term::new("255", Span::call_site());
+    Ident::new("255", Span::call_site());
 }
 
 #[test]
-#[should_panic(expected = "\"a#\" is not a valid Term")]
+#[should_panic(expected = "\"a#\" is not a valid Ident")]
 fn term_invalid() {
-    Term::new("a#", Span::call_site());
+    Ident::new("a#", Span::call_site());
 }
 
 #[test]
-#[should_panic(expected = "Term is not allowed to be empty; use Option<Term>")]
+#[should_panic(expected = "not a valid Ident")]
 fn raw_term_empty() {
-    Term::new("r#", Span::call_site());
+    Ident::new("r#", Span::call_site());
 }
 
 #[test]
-#[should_panic(expected = "Term cannot be a number; use Literal instead")]
+#[should_panic(expected = "not a valid Ident")]
 fn raw_term_number() {
-    Term::new("r#255", Span::call_site());
+    Ident::new("r#255", Span::call_site());
 }
 
 #[test]
-#[should_panic(expected = "\"r#a#\" is not a valid Term")]
+#[should_panic(expected = "\"r#a#\" is not a valid Ident")]
 fn raw_term_invalid() {
-    Term::new("r#a#", Span::call_site());
+    Ident::new("r#a#", Span::call_site());
 }
 
 #[test]
-#[should_panic(expected = "Term is not allowed to be empty; use Option<Term>")]
+#[should_panic(expected = "not a valid Ident")]
 fn lifetime_empty() {
-    Term::new("'", Span::call_site());
+    Ident::new("'", Span::call_site());
 }
 
 #[test]
-#[should_panic(expected = "Term cannot be a number; use Literal instead")]
+#[should_panic(expected = "not a valid Ident")]
 fn lifetime_number() {
-    Term::new("'255", Span::call_site());
+    Ident::new("'255", Span::call_site());
 }
 
 #[test]
-#[should_panic(expected = r#""\'a#" is not a valid Term"#)]
+#[should_panic(expected = r#""\'a#" is not a valid Ident"#)]
 fn lifetime_invalid() {
-    Term::new("'a#", Span::call_site());
+    Ident::new("'a#", Span::call_site());
 }
 
 #[test]
 fn literals() {
     assert_eq!(Literal::string("foo").to_string(), "\"foo\"");
     assert_eq!(Literal::string("\"").to_string(), "\"\\\"\"");
     assert_eq!(Literal::f32_unsuffixed(10.0).to_string(), "10.0");
 }
@@ -111,33 +111,33 @@ fn roundtrip() {
         1_0i32
         8u8
         9
         0
         0xffffffffffffffffffffffffffffffff
     ",
     );
     roundtrip("'a");
+    roundtrip("'_");
     roundtrip("'static");
     roundtrip("'\\u{10__FFFF}'");
     roundtrip("\"\\u{10_F0FF__}foo\\u{1_0_0_0__}\"");
 }
 
 #[test]
 fn fail() {
     fn fail(p: &str) {
-        if p.parse::<TokenStream>().is_ok() {
-            panic!("should have failed to parse: {}", p);
+        if let Ok(s) = p.parse::<TokenStream>() {
+            panic!("should have failed to parse: {}\n{:#?}", p, s);
         }
     }
     fail("1x");
     fail("1u80");
     fail("1f320");
     fail("' static");
-    fail("'mut");
     fail("r#1");
     fail("r#_");
 }
 
 #[cfg(procmacro2_semver_exempt)]
 #[test]
 fn span_test() {
     use proc_macro2::TokenTree;
@@ -256,49 +256,128 @@ fn tricky_doc_comment() {
     let stream = "/**/".parse::<proc_macro2::TokenStream>().unwrap();
     let tokens = stream.into_iter().collect::<Vec<_>>();
     assert!(tokens.is_empty(), "not empty -- {:?}", tokens);
 
     let stream = "/// doc".parse::<proc_macro2::TokenStream>().unwrap();
     let tokens = stream.into_iter().collect::<Vec<_>>();
     assert!(tokens.len() == 2, "not length 2 -- {:?}", tokens);
     match tokens[0] {
-        proc_macro2::TokenTree::Op(ref tt) => assert_eq!(tt.op(), '#'),
+        proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '#'),
         _ => panic!("wrong token {:?}", tokens[0]),
     }
     let mut tokens = match tokens[1] {
         proc_macro2::TokenTree::Group(ref tt) => {
             assert_eq!(tt.delimiter(), proc_macro2::Delimiter::Bracket);
             tt.stream().into_iter()
         }
         _ => panic!("wrong token {:?}", tokens[0]),
     };
 
     match tokens.next().unwrap() {
-        proc_macro2::TokenTree::Term(ref tt) => assert_eq!(tt.as_str(), "doc"),
+        proc_macro2::TokenTree::Ident(ref tt) => assert_eq!(tt.to_string(), "doc"),
         t => panic!("wrong token {:?}", t),
     }
     match tokens.next().unwrap() {
-        proc_macro2::TokenTree::Op(ref tt) => assert_eq!(tt.op(), '='),
+        proc_macro2::TokenTree::Punct(ref tt) => assert_eq!(tt.as_char(), '='),
         t => panic!("wrong token {:?}", t),
     }
     match tokens.next().unwrap() {
         proc_macro2::TokenTree::Literal(ref tt) => {
             assert_eq!(tt.to_string(), "\" doc\"");
         }
         t => panic!("wrong token {:?}", t),
     }
     assert!(tokens.next().is_none());
 
     let stream = "//! doc".parse::<proc_macro2::TokenStream>().unwrap();
     let tokens = stream.into_iter().collect::<Vec<_>>();
     assert!(tokens.len() == 3, "not length 3 -- {:?}", tokens);
 }
 
 #[test]
+fn op_before_comment() {
+    let mut tts = TokenStream::from_str("~// comment").unwrap().into_iter();
+    match tts.next().unwrap() {
+        TokenTree::Punct(tt) => {
+            assert_eq!(tt.as_char(), '~');
+            assert_eq!(tt.spacing(), Spacing::Alone);
+        }
+        wrong => panic!("wrong token {:?}", wrong),
+    }
+}
+
+#[test]
 fn raw_identifier() {
     let mut tts = TokenStream::from_str("r#dyn").unwrap().into_iter();
     match tts.next().unwrap() {
-        TokenTree::Term(raw) => assert_eq!("r#dyn", raw.as_str()),
+        TokenTree::Ident(raw) => assert_eq!("r#dyn", raw.to_string()),
         wrong => panic!("wrong token {:?}", wrong),
     }
     assert!(tts.next().is_none());
 }
+
+#[test]
+fn test_debug_ident() {
+    let ident = Ident::new("proc_macro", Span::call_site());
+
+    #[cfg(not(procmacro2_semver_exempt))]
+    let expected = "Ident(proc_macro)";
+
+    #[cfg(procmacro2_semver_exempt)]
+    let expected = "Ident { sym: proc_macro, span: bytes(0..0) }";
+
+    assert_eq!(expected, format!("{:?}", ident));
+}
+
+#[test]
+#[cfg(not(feature = "nightly"))]
+fn test_debug_tokenstream() {
+    let tts = TokenStream::from_str("[a + 1]").unwrap();
+
+    #[cfg(not(procmacro2_semver_exempt))]
+    let expected = "\
+TokenStream [
+    Group {
+        delimiter: Bracket,
+        stream: TokenStream [
+            Ident {
+                sym: a
+            },
+            Punct {
+                op: '+',
+                spacing: Alone
+            },
+            Literal {
+                lit: 1
+            }
+        ]
+    }
+]\
+    ";
+
+    #[cfg(procmacro2_semver_exempt)]
+    let expected = "\
+TokenStream [
+    Group {
+        delimiter: Bracket,
+        stream: TokenStream [
+            Ident {
+                sym: a,
+                span: bytes(2..3)
+            },
+            Punct {
+                op: '+',
+                spacing: Alone,
+                span: bytes(4..5)
+            },
+            Literal {
+                lit: 1,
+                span: bytes(6..7)
+            }
+        ],
+        span: bytes(1..8)
+    }
+]\
+    ";
+
+    assert_eq!(expected, format!("{:#?}", tts));
+}
copy from third_party/rust/quote/.cargo-checksum.json
copy to third_party/rust/quote-0.5.2/.cargo-checksum.json
copy from third_party/rust/quote/Cargo.toml
copy to third_party/rust/quote-0.5.2/Cargo.toml
new file mode 100644
--- /dev/null
+++ b/third_party/rust/quote-0.5.2/LICENSE-APACHE
@@ -0,0 +1,201 @@
+                              Apache License
+                        Version 2.0, January 2004
+                     http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+   "License" shall mean the terms and conditions for use, reproduction,
+   and distribution as defined by Sections 1 through 9 of this document.
+
+   "Licensor" shall mean the copyright owner or entity authorized by
+   the copyright owner that is granting the License.
+
+   "Legal Entity" shall mean the union of the acting entity and all
+   other entities that control, are controlled by, or are under common
+   control with that entity. For the purposes of this definition,
+   "control" means (i) the power, direct or indirect, to cause the
+   direction or management of such entity, whether by contract or
+   otherwise, or (ii) ownership of fifty percent (50%) or more of the
+   outstanding shares, or (iii) beneficial ownership of such entity.
+
+   "You" (or "Your") shall mean an individual or Legal Entity
+   exercising permissions granted by this License.
+
+   "Source" form shall mean the preferred form for making modifications,
+   including but not limited to software source code, documentation
+   source, and configuration files.
+
+   "Object" form shall mean any form resulting from mechanical
+   transformation or translation of a Source form, including but
+   not limited to compiled object code, generated documentation,
+   and conversions to other media types.
+
+   "Work" shall mean the work of authorship, whether in Source or
+   Object form, made available under the License, as indicated by a
+   copyright notice that is included in or attached to the work
+   (an example is provided in the Appendix below).
+
+   "Derivative Works" shall mean any work, whether in Source or Object
+   form, that is based on (or derived from) the Work and for which the
+   editorial revisions, annotations, elaborations, or other modifications
+   represent, as a whole, an original work of authorship. For the purposes
+   of this License, Derivative Works shall not include works that remain
+   separable from, or merely link (or bind by name) to the interfaces of,
+   the Work and Derivative Works thereof.
+
+   "Contribution" shall mean any work of authorship, including
+   the original version of the Work and any modifications or additions
+   to that Work or Derivative Works thereof, that is intentionally
+   submitted to Licensor for inclusion in the Work by the copyright owner
+   or by an individual or Legal Entity authorized to submit on behalf of
+   the copyright owner. For the purposes of this definition, "submitted"
+   means any form of electronic, verbal, or written communication sent
+   to the Licensor or its representatives, including but not limited to
+   communication on electronic mailing lists, source code control systems,
+   and issue tracking systems that are managed by, or on behalf of, the
+   Licensor for the purpose of discussing and improving the Work, but
+   excluding communication that is conspicuously marked or otherwise
+   designated in writing by the copyright owner as "Not a Contribution."
+
+   "Contributor" shall mean Licensor and any individual or Legal Entity
+   on behalf of whom a Contribution has been received by Licensor and
+   subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   copyright license to reproduce, prepare Derivative Works of,
+   publicly display, publicly perform, sublicense, and distribute the
+   Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   (except as stated in this section) patent license to make, have made,
+   use, offer to sell, sell, import, and otherwise transfer the Work,
+   where such license applies only to those patent claims licensable
+   by such Contributor that are necessarily infringed by their
+   Contribution(s) alone or by combination of their Contribution(s)
+   with the Work to which such Contribution(s) was submitted. If You
+   institute patent litigation against any entity (including a
+   cross-claim or counterclaim in a lawsuit) alleging that the Work
+   or a Contribution incorporated within the Work constitutes direct
+   or contributory patent infringement, then any patent licenses
+   granted to You under this License for that Work shall terminate
+   as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+   Work or Derivative Works thereof in any medium, with or without
+   modifications, and in Source or Object form, provided that You
+   meet the following conditions:
+
+   (a) You must give any other recipients of the Work or
+       Derivative Works a copy of this License; and
+
+   (b) You must cause any modified files to carry prominent notices
+       stating that You changed the files; and
+
+   (c) You must retain, in the Source form of any Derivative Works
+       that You distribute, all copyright, patent, trademark, and
+       attribution notices from the Source form of the Work,
+       excluding those notices that do not pertain to any part of
+       the Derivative Works; and
+
+   (d) If the Work includes a "NOTICE" text file as part of its
+       distribution, then any Derivative Works that You distribute must
+       include a readable copy of the attribution notices contained
+       within such NOTICE file, excluding those notices that do not
+       pertain to any part of the Derivative Works, in at least one
+       of the following places: within a NOTICE text file distributed
+       as part of the Derivative Works; within the Source form or
+       documentation, if provided along with the Derivative Works; or,
+       within a display generated by the Derivative Works, if and
+       wherever such third-party notices normally appear. The contents
+       of the NOTICE file are for informational purposes only and
+       do not modify the License. You may add Your own attribution
+       notices within Derivative Works that You distribute, alongside
+       or as an addendum to the NOTICE text from the Work, provided
+       that such additional attribution notices cannot be construed
+       as modifying the License.
+
+   You may add Your own copyright statement to Your modifications and
+   may provide additional or different license terms and conditions
+   for use, reproduction, or distribution of Your modifications, or
+   for any such Derivative Works as a whole, provided Your use,
+   reproduction, and distribution of the Work otherwise complies with
+   the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+   any Contribution intentionally submitted for inclusion in the Work
+   by You to the Licensor shall be under the terms and conditions of
+   this License, without any additional terms or conditions.
+   Notwithstanding the above, nothing herein shall supersede or modify
+   the terms of any separate license agreement you may have executed
+   with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+   names, trademarks, service marks, or product names of the Licensor,
+   except as required for reasonable and customary use in describing the
+   origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+   agreed to in writing, Licensor provides the Work (and each
+   Contributor provides its Contributions) on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+   implied, including, without limitation, any warranties or conditions
+   of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+   PARTICULAR PURPOSE. You are solely responsible for determining the
+   appropriateness of using or redistributing the Work and assume any
+   risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+   whether in tort (including negligence), contract, or otherwise,
+   unless required by applicable law (such as deliberate and grossly
+   negligent acts) or agreed to in writing, shall any Contributor be
+   liable to You for damages, including any direct, indirect, special,
+   incidental, or consequential damages of any character arising as a
+   result of this License or out of the use or inability to use the
+   Work (including but not limited to damages for loss of goodwill,
+   work stoppage, computer failure or malfunction, or any and all
+   other commercial damages or losses), even if such Contributor
+   has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+   the Work or Derivative Works thereof, You may choose to offer,
+   and charge a fee for, acceptance of support, warranty, indemnity,
+   or other liability obligations and/or rights consistent with this
+   License. However, in accepting such obligations, You may act only
+   on Your own behalf and on Your sole responsibility, not on behalf
+   of any other Contributor, and only if You agree to indemnify,
+   defend, and hold each Contributor harmless for any liability
+   incurred by, or claims asserted against, such Contributor by reason
+   of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+   To apply the Apache License to your work, attach the following
+   boilerplate notice, with the fields enclosed by brackets "[]"
+   replaced with your own identifying information. (Don't include
+   the brackets!)  The text should be enclosed in the appropriate
+   comment syntax for the file format. We also recommend that a
+   file or class name and description of purpose be included on the
+   same "printed page" as the copyright notice for easier
+   identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+	http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
new file mode 100644
--- /dev/null
+++ b/third_party/rust/quote-0.5.2/LICENSE-MIT
@@ -0,0 +1,25 @@
+Copyright (c) 2016 The Rust Project Developers
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
copy from third_party/rust/quote/README.md
copy to third_party/rust/quote-0.5.2/README.md
copy from third_party/rust/quote/src/lib.rs
copy to third_party/rust/quote-0.5.2/src/lib.rs
copy from third_party/rust/quote/src/to_tokens.rs
copy to third_party/rust/quote-0.5.2/src/to_tokens.rs
rename from third_party/rust/quote/src/tokens.rs
rename to third_party/rust/quote-0.5.2/src/tokens.rs
copy from third_party/rust/quote/tests/test.rs
copy to third_party/rust/quote-0.5.2/tests/test.rs
--- a/third_party/rust/quote/.cargo-checksum.json
+++ b/third_party/rust/quote/.cargo-checksum.json
@@ -1,1 +1,1 @@
-{"files":{"Cargo.toml":"8078663280ca2bbda17459a3c2629b84aee2b9904a83f83b87f1bf60e096692f","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"bdb5b5375e8cd37b75b4e0269b8fa9fb22776df9762c1df11ec88eb4cd2dc097","src/lib.rs":"7f72accca88679bc49cc8aedf7d96c922288e66a3d63bf1d06f5da262f02a4ad","src/to_tokens.rs":"6eb18c100701d1f9556cd21b1f2faca3316e85029345274dcfe7691e7ffa254d","src/tokens.rs":"a4939fc092d6466d5a2e75474886152e880586b12e057c0d7bf7b3f22428b2de","tests/test.rs":"35bac59a637a8dc3919df51bfa0957b6f964f408cc63c7a81a3e759ab8557f55"},"package":"9949cfe66888ffe1d53e6ec9d9f3b70714083854be20fd5e271b232a017401e8"}
\ No newline at end of file
+{"files":{"Cargo.toml":"33e512b1a2fd40b4d0b5af4ac16ad4f163e0383ba2f4abcd7a7e575e2af3442c","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"c9a75f18b9ab2927829a208fc6aa2cf4e63b8420887ba29cdb265d6619ae82d5","README.md":"61dc7827fb2e29185f0d73594db326bfdbec8393ca7a48429b259711d42e80f9","src/ext.rs":"2e2f71fca8c8580eeed138da42d93dc21fc48d7a8da973ae6d3b616da6a3b0e3","src/lib.rs":"0dedf7935a0203324804cecdf6350245caa24dbdaaf9e168b7ab90b0883ec0c4","src/to_tokens.rs":"10dc32fbe69798408ee1f49ec25770b90eeb6b069552f50cd4e03228b8e85847","tests/test.rs":"90fe0e9a704e628339fe9298f0cb8307e94ebadfe28fffd7b2fc2d94203bc342"},"package":"e44651a0dc4cdd99f71c83b561e221f714912d11af1a4dff0631f923d53af035"}
\ No newline at end of file
--- a/third_party/rust/quote/Cargo.toml
+++ b/third_party/rust/quote/Cargo.toml
@@ -7,24 +7,24 @@
 #
 # If you believe there's an error in this file please file an
 # issue against the rust-lang/cargo repository. If you're
 # editing this file be aware that the upstream Cargo.toml
 # will likely look very different (and much more reasonable)
 
 [package]
 name = "quote"
-version = "0.5.2"
+version = "0.6.3"
 authors = ["David Tolnay <dtolnay@gmail.com>"]
 include = ["Cargo.toml", "src/**/*.rs", "tests/**/*.rs", "README.md", "LICENSE-APACHE", "LICENSE-MIT"]
 description = "Quasi-quoting macro quote!(...)"
 documentation = "https://docs.rs/quote/"
 readme = "README.md"
 keywords = ["syn"]
 license = "MIT/Apache-2.0"
 repository = "https://github.com/dtolnay/quote"
 [dependencies.proc-macro2]
-version = "0.3"
+version = "0.4.4"
 default-features = false
 
 [features]
 default = ["proc-macro"]
 proc-macro = ["proc-macro2/proc-macro"]
--- a/third_party/rust/quote/README.md
+++ b/third_party/rust/quote/README.md
@@ -3,17 +3,17 @@ Rust Quasi-Quoting
 
 [![Build Status](https://api.travis-ci.org/dtolnay/quote.svg?branch=master)](https://travis-ci.org/dtolnay/quote)
 [![Latest Version](https://img.shields.io/crates/v/quote.svg)](https://crates.io/crates/quote)
 [![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/quote/)
 
 This crate provides the [`quote!`] macro for turning Rust syntax tree data
 structures into tokens of source code.
 
-[`quote!`]: https://docs.rs/quote/0.5/quote/macro.quote.html
+[`quote!`]: https://docs.rs/quote/0.6/quote/macro.quote.html
 
 Procedural macros in Rust receive a stream of tokens as input, execute arbitrary
 Rust code to determine how to manipulate those tokens, and produce a stream of
 tokens to hand back to the compiler to compile into the caller's crate.
 Quasi-quoting is a solution to one piece of that -- producing tokens to return
 to the compiler.
 
 The idea of quasi-quoting is that we write *code* that we treat as *data*.
@@ -26,54 +26,56 @@ the macro caller's crate.
 
 This crate is motivated by the procedural macro use case, but is a
 general-purpose Rust quasi-quoting library and is not specific to procedural
 macros.
 
 *Version requirement: Quote supports any compiler version back to Rust's very
 first support for procedural macros in Rust 1.15.0.*
 
+[*Release notes*](https://github.com/dtolnay/quote/releases)
+
 ```toml
 [dependencies]
-quote = "0.5"
+quote = "0.6"
 ```
 
 ```rust
 #[macro_use]
 extern crate quote;
 ```
 
 ## Syntax
 
 The quote crate provides a [`quote!`] macro within which you can write Rust code
-that gets packaged into a [`quote::Tokens`] and can be treated as data. You
-should think of `Tokens` as representing a fragment of Rust source code. Call
-`to_string()` on a `Tokens` to get back the fragment of source code as a string,
-or call `into()` to stream them as a `TokenStream` back to the compiler in a
-procedural macro.
+that gets packaged into a [`TokenStream`] and can be treated as data. You should
+think of `TokenStream` as representing a fragment of Rust source code. This type
+can be returned directly back to the compiler by a procedural macro to get
+compiled into the caller's crate.
 
-[`quote::Tokens`]: https://docs.rs/quote/0.5/quote/struct.Tokens.html
+[`TokenStream`]: https://docs.rs/proc-macro2/0.4/proc_macro2/struct.TokenStream.html
 
 Within the `quote!` macro, interpolation is done with `#var`. Any type
 implementing the [`quote::ToTokens`] trait can be interpolated. This includes
 most Rust primitive types as well as most of the syntax tree types from [`syn`].
 
-[`quote::ToTokens`]: https://docs.rs/quote/0.5/quote/trait.ToTokens.html
+[`quote::ToTokens`]: https://docs.rs/quote/0.6/quote/trait.ToTokens.html
 [`syn`]: https://github.com/dtolnay/syn
 
 ```rust
 let tokens = quote! {
     struct SerializeWith #generics #where_clause {
         value: &'a #field_ty,
         phantom: ::std::marker::PhantomData<#item_ty>,
     }
 
     impl #generics serde::Serialize for SerializeWith #generics #where_clause {
         fn serialize<S>(&self, s: &mut S) -> Result<(), S::Error>
-            where S: serde::Serializer
+        where
+            S: serde::Serializer,
         {
             #path(self.value, s)
         }
     }
 
     SerializeWith {
         value: #value,
         phantom: ::std::marker::PhantomData::<#item_ty>,
@@ -97,24 +99,34 @@ a pre-existing iterator.
 Note that there is a difference between `#(#var ,)*` and `#(#var),*`—the latter
 does not produce a trailing comma. This matches the behavior of delimiters in
 `macro_rules!`.
 
 ## Hygiene
 
 Any interpolated tokens preserve the `Span` information provided by their
 `ToTokens` implementation. Tokens that originate within a `quote!` invocation
-are spanned with [`Span::def_site()`].
+are spanned with [`Span::call_site()`].
 
-[`Span::def_site()`]: https://docs.rs/proc-macro2/0.2/proc_macro2/struct.Span.html#method.def_site
+[`Span::call_site()`]: https://docs.rs/proc-macro2/0.4/proc_macro2/struct.Span.html#method.call_site
 
 A different span can be provided explicitly through the [`quote_spanned!`]
 macro.
 
-[`quote_spanned!`]: https://docs.rs/quote/0.5/quote/macro.quote_spanned.html
+[`quote_spanned!`]: https://docs.rs/quote/0.6/quote/macro.quote_spanned.html
+
+### Limitations
+
+- A non-repeating variable may not be interpolated inside of a repeating block
+  ([#7]).
+- The same variable may not be interpolated more than once inside of a repeating
+  block ([#8]).
+
+[#7]: https://github.com/dtolnay/quote/issues/7
+[#8]: https://github.com/dtolnay/quote/issues/8
 
 ### Recursion limit
 
 The `quote!` macro relies on deep recursion so some large invocations may fail
 with "recursion limit reached" when you compile. If it fails, bump up the
 recursion limit by adding `#![recursion_limit = "128"]` to your crate. An even
 higher limit may be necessary for especially large invocations. You don't need
 this unless the compiler tells you that you need it.
new file mode 100644
--- /dev/null
+++ b/third_party/rust/quote/src/ext.rs
@@ -0,0 +1,113 @@
+use super::ToTokens;
+
+use proc_macro2::{TokenStream, TokenTree};
+
+/// TokenStream extension trait with methods for appending tokens.
+///
+/// This trait is sealed and cannot be implemented outside of the `quote` crate.
+pub trait TokenStreamExt: private::Sealed {
+    fn append<U>(&mut self, token: U)
+    where
+        U: Into<TokenTree>;
+
+    fn append_all<T, I>(&mut self, iter: I)
+    where
+        T: ToTokens,
+        I: IntoIterator<Item = T>;
+
+    fn append_separated<T, I, U>(&mut self, iter: I, op: U)
+    where
+        T: ToTokens,
+        I: IntoIterator<Item = T>,
+        U: ToTokens;
+
+    fn append_terminated<T, I, U>(&mut self, iter: I, term: U)
+    where
+        T: ToTokens,
+        I: IntoIterator<Item = T>,
+        U: ToTokens;
+}
+
+impl TokenStreamExt for TokenStream {
+    /// For use by `ToTokens` implementations.
+    ///
+    /// Appends the token specified to this list of tokens.
+    fn append<U>(&mut self, token: U)
+    where
+        U: Into<TokenTree>,
+    {
+        self.extend(Some(token.into()));
+    }
+
+    /// For use by `ToTokens` implementations.
+    ///
+    /// ```
+    /// # #[macro_use] extern crate quote;
+    /// # extern crate proc_macro2;
+    /// # use quote::{TokenStreamExt, ToTokens};
+    /// # use proc_macro2::TokenStream;
+    /// # fn main() {
+    /// struct X;
+    ///
+    /// impl ToTokens for X {
+    ///     fn to_tokens(&self, tokens: &mut TokenStream) {
+    ///         tokens.append_all(&[true, false]);
+    ///     }
+    /// }
+    ///
+    /// let tokens = quote!(#X);
+    /// assert_eq!(tokens.to_string(), "true false");
+    /// # }
+    /// ```
+    fn append_all<T, I>(&mut self, iter: I)
+    where
+        T: ToTokens,
+        I: IntoIterator<Item = T>,
+    {
+        for token in iter {
+            token.to_tokens(self);
+        }
+    }
+
+    /// For use by `ToTokens` implementations.
+    ///
+    /// Appends all of the items in the iterator `I`, separated by the tokens
+    /// `U`.
+    fn append_separated<T, I, U>(&mut self, iter: I, op: U)
+    where
+        T: ToTokens,
+        I: IntoIterator<Item = T>,
+        U: ToTokens,
+    {
+        for (i, token) in iter.into_iter().enumerate() {
+            if i > 0 {
+                op.to_tokens(self);
+            }
+            token.to_tokens(self);
+        }
+    }
+
+    /// For use by `ToTokens` implementations.
+    ///
+    /// Appends all tokens in the iterator `I`, appending `U` after each
+    /// element, including after the last element of the iterator.
+    fn append_terminated<T, I, U>(&mut self, iter: I, term: U)
+    where
+        T: ToTokens,
+        I: IntoIterator<Item = T>,
+        U: ToTokens,
+    {
+        for token in iter {
+            token.to_tokens(self);
+            term.to_tokens(self);
+        }
+    }
+}
+
+mod private {
+    use proc_macro2::TokenStream;
+
+    pub trait Sealed {}
+
+    impl Sealed for TokenStream {}
+}
--- a/third_party/rust/quote/src/lib.rs
+++ b/third_party/rust/quote/src/lib.rs
@@ -21,17 +21,17 @@
 //! general-purpose Rust quasi-quoting library and is not specific to procedural
 //! macros.
 //!
 //! *Version requirement: Quote supports any compiler version back to Rust's
 //! very first support for procedural macros in Rust 1.15.0.*
 //!
 //! ```toml
 //! [dependencies]
-//! quote = "0.5"
+//! quote = "0.6"
 //! ```
 //!
 //! ```
 //! #[macro_use]
 //! extern crate quote;
 //! #
 //! # fn main() {}
 //! ```
@@ -63,17 +63,18 @@
 //! let tokens = quote! {
 //!     struct SerializeWith #generics #where_clause {
 //!         value: &'a #field_ty,
 //!         phantom: ::std::marker::PhantomData<#item_ty>,
 //!     }
 //!
 //!     impl #generics serde::Serialize for SerializeWith #generics #where_clause {
 //!         fn serialize<S>(&self, s: &mut S) -> Result<(), S::Error>
-//!             where S: serde::Serializer
+//!         where
+//!             S: serde::Serializer,
 //!         {
 //!             #path(self.value, s)
 //!         }
 //!     }
 //!
 //!     SerializeWith {
 //!         value: #value,
 //!         phantom: ::std::marker::PhantomData::<#item_ty>,
@@ -86,51 +87,51 @@
 //! ## Recursion limit
 //!
 //! The `quote!` macro relies on deep recursion so some large invocations may
 //! fail with "recursion limit reached" when you compile. If it fails, bump up
 //! the recursion limit by adding `#![recursion_limit = "128"]` to your crate.
 //! An even higher limit may be necessary for especially large invocations.
 
 // Quote types in rustdoc of other crates get linked to here.
-#![doc(html_root_url = "https://docs.rs/quote/0.5.2")]
+#![doc(html_root_url = "https://docs.rs/quote/0.6.3")]
 
 #[cfg(feature = "proc-macro")]
 extern crate proc_macro;
 extern crate proc_macro2;
 
-mod tokens;
-pub use tokens::Tokens;
+mod ext;
+pub use ext::TokenStreamExt;
 
 mod to_tokens;
 pub use to_tokens::ToTokens;
 
 // Not public API.
 #[doc(hidden)]
 pub mod __rt {
     // Not public API.
     pub use proc_macro2::*;
 
     // Not public API.
-    pub fn parse(tokens: &mut ::Tokens, span: Span, s: &str) {
+    pub fn parse(tokens: &mut TokenStream, span: Span, s: &str) {
         let s: TokenStream = s.parse().expect("invalid token stream");
-        tokens.append_all(s.into_iter().map(|mut t| {
+        tokens.extend(s.into_iter().map(|mut t| {
             t.set_span(span);
             t
         }));
     }
 }
 
 /// The whole point.
 ///
 /// Performs variable interpolation against the input and produces it as
-/// [`Tokens`]. For returning tokens to the compiler in a procedural macro, use
+/// [`TokenStream`]. For returning tokens to the compiler in a procedural macro, use
 /// `into()` to build a `TokenStream`.
 ///
-/// [`Tokens`]: struct.Tokens.html
+/// [`TokenStream`]: https://docs.rs/proc-macro2/0.4/proc_macro2/struct.TokenStream.html
 ///
 /// # Interpolation
 ///
 /// Variable interpolation is done with `#var` (similar to `$var` in
 /// `macro_rules!` macros). This grabs the `var` variable that is currently in
 /// scope and inserts it in that location in the output tokens. The variable
 /// must implement the [`ToTokens`] trait.
 ///
@@ -148,28 +149,27 @@ pub mod __rt {
 /// - `#( #k => println!("{}", #v), )*` — even multiple interpolations
 ///
 /// # Hygiene
 ///
 /// Any interpolated tokens preserve the `Span` information provided by their
 /// `ToTokens` implementation. Tokens that originate within the `quote!`
 /// invocation are spanned with [`Span::call_site()`].
 ///
-/// [`Span::call_site()`]: https://docs.rs/proc-macro2/0.2/proc_macro2/struct.Span.html#method.call_site
+/// [`Span::call_site()`]: https://docs.rs/proc-macro2/0.4/proc_macro2/struct.Span.html#method.call_site
 ///
 /// A different span can be provided through the [`quote_spanned!`] macro.
 ///
 /// [`quote_spanned!`]: macro.quote_spanned.html
 ///
 /// # Example
 ///
 /// ```
-/// # #[cfg(feature = "proc-macro")]
+/// # #[cfg(any())]
 /// extern crate proc_macro;
-/// # #[cfg(not(feature = "proc-macro"))]
 /// # extern crate proc_macro2 as proc_macro;
 ///
 /// #[macro_use]
 /// extern crate quote;
 ///
 /// use proc_macro::TokenStream;
 ///
 /// # const IGNORE_TOKENS: &'static str = stringify! {
@@ -209,17 +209,17 @@ macro_rules! quote {
 /// the macro invocation.
 ///
 /// # Syntax
 ///
 /// A span expression of type [`Span`], followed by `=>`, followed by the tokens
 /// to quote. The span expression should be brief -- use a variable for anything
 /// more than a few characters. There should be no space before the `=>` token.
 ///
-/// [`Span`]: https://docs.rs/proc-macro2/0.2/proc_macro2/struct.Span.html
+/// [`Span`]: https://docs.rs/proc-macro2/0.4/proc_macro2/struct.Span.html
 ///
 /// ```
 /// # #[macro_use]
 /// # extern crate quote;
 /// # extern crate proc_macro2;
 /// #
 /// # use proc_macro2::Span;
 /// #
@@ -260,29 +260,29 @@ macro_rules! quote {
 ///
 /// [`Sync`]: https://doc.rust-lang.org/std/marker/trait.Sync.html
 ///
 /// ```
 /// # #[macro_use]
 /// # extern crate quote;
 /// # extern crate proc_macro2;
 /// #
-/// # use quote::{Tokens, ToTokens};
-/// # use proc_macro2::Span;
+/// # use quote::{TokenStreamExt, ToTokens};
+/// # use proc_macro2::{Span, TokenStream};
 /// #
 /// # struct Type;
 /// #
 /// # impl Type {
 /// #     fn span(&self) -> Span {
 /// #         Span::call_site()
 /// #     }
 /// # }
 /// #
 /// # impl ToTokens for Type {
-/// #     fn to_tokens(&self, _tokens: &mut Tokens) {}
+/// #     fn to_tokens(&self, _tokens: &mut TokenStream) {}
 /// # }
 /// #
 /// # fn main() {
 /// # let ty = Type;
 /// # let call_site = Span::call_site();
 /// #
 /// let ty_span = ty.span();
 /// let assert_sync = quote_spanned! {ty_span=>
@@ -308,17 +308,17 @@ macro_rules! quote {
 /// that `Sync` resolves at the macro definition site and not the macro call
 /// site. If we resolve `Sync` at the same span that the user's type is going to
 /// be resolved, then they could bypass our check by defining their own trait
 /// named `Sync` that is implemented for their type.
 #[macro_export]
 macro_rules! quote_spanned {
     ($span:expr=> $($tt:tt)*) => {
         {
-            let mut _s = $crate::Tokens::new();
+            let mut _s = $crate::__rt::TokenStream::new();
             let _span = $span;
             quote_each_token!(_s _span $($tt)*);
             _s
         }
     };
 }
 
 // Extract the names of all #metavariables and pass them to the $finish macro.
@@ -446,64 +446,64 @@ macro_rules! quote_each_token {
             }
             quote_each_token!($tokens $span $($inner)*);
         }
         quote_each_token!($tokens $span $($rest)*);
     };
 
     ($tokens:ident $span:ident # [ $($inner:tt)* ] $($rest:tt)*) => {
         quote_each_token!($tokens $span #);
-        $tokens.append({
+        $tokens.extend({
             let mut g = $crate::__rt::Group::new(
                 $crate::__rt::Delimiter::Bracket,
                 quote_spanned!($span=> $($inner)*).into(),
             );
             g.set_span($span);
-            g
+            Some($crate::__rt::TokenTree::from(g))
         });
         quote_each_token!($tokens $span $($rest)*);
     };
 
     ($tokens:ident $span:ident # $first:ident $($rest:tt)*) => {
         $crate::ToTokens::to_tokens(&$first, &mut $tokens);
         quote_each_token!($tokens $span $($rest)*);
     };
 
     ($tokens:ident $span:ident ( $($first:tt)* ) $($rest:tt)*) => {
-        $tokens.append({
+        $tokens.extend({
             let mut g = $crate::__rt::Group::new(
                 $crate::__rt::Delimiter::Parenthesis,
                 quote_spanned!($span=> $($first)*).into(),
             );
             g.set_span($span);
-            g
+            Some($crate::__rt::TokenTree::from(g))
         });
         quote_each_token!($tokens $span $($rest)*);
     };
 
     ($tokens:ident $span:ident [ $($first:tt)* ] $($rest:tt)*) => {
-        $tokens.append({
+        $tokens.extend({
             let mut g = $crate::__rt::Group::new(
                 $crate::__rt::Delimiter::Bracket,
                 quote_spanned!($span=> $($first)*).into(),
             );
             g.set_span($span);
-            g
+            Some($crate::__rt::TokenTree::from(g))
         });
         quote_each_token!($tokens $span $($rest)*);
     };
 
     ($tokens:ident $span:ident { $($first:tt)* } $($rest:tt)*) => {
-        $tokens.append({
+        $tokens.extend({
             let mut g = $crate::__rt::Group::new(
                 $crate::__rt::Delimiter::Brace,
                 quote_spanned!($span=> $($first)*).into(),
             );
             g.set_span($span);
-            g
+            Some($crate::__rt::TokenTree::from(g))
         });
         quote_each_token!($tokens $span $($rest)*);
     };
 
     ($tokens:ident $span:ident $first:tt $($rest:tt)*) => {
         // TODO: this seems slow... special case some `:tt` arguments?
         $crate::__rt::parse(&mut $tokens, $span, stringify!($first));
         quote_each_token!($tokens $span $($rest)*);
--- a/third_party/rust/quote/src/to_tokens.rs
+++ b/third_party/rust/quote/src/to_tokens.rs
@@ -1,116 +1,123 @@
-use super::Tokens;
+use super::TokenStreamExt;
 
 use std::borrow::Cow;
 
-use proc_macro2::{Group, Literal, Op, Span, Term, TokenStream, TokenTree};
+use proc_macro2::{Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};
 
 /// Types that can be interpolated inside a [`quote!`] invocation.
 ///
 /// [`quote!`]: macro.quote.html
 pub trait ToTokens {
-    /// Write `self` to the given `Tokens`.
+    /// Write `self` to the given `TokenStream`.
+    ///
+    /// The token append methods provided by the [`TokenStreamExt`] extension
+    /// trait may be useful for implementing `ToTokens`.
+    ///
+    /// [`TokenStreamExt`]: trait.TokenStreamExt.html
+    ///
+    /// # Example
     ///
     /// Example implementation for a struct representing Rust paths like
     /// `std::cmp::PartialEq`:
     ///
     /// ```
     /// extern crate quote;
-    /// use quote::{Tokens, ToTokens};
+    /// use quote::{TokenStreamExt, ToTokens};
     ///
     /// extern crate proc_macro2;
-    /// use proc_macro2::{TokenTree, Spacing, Span, Op};
+    /// use proc_macro2::{TokenTree, Spacing, Span, Punct, TokenStream};
     ///
     /// pub struct Path {
     ///     pub global: bool,
     ///     pub segments: Vec<PathSegment>,
     /// }
     ///
     /// impl ToTokens for Path {
-    ///     fn to_tokens(&self, tokens: &mut Tokens) {
+    ///     fn to_tokens(&self, tokens: &mut TokenStream) {
     ///         for (i, segment) in self.segments.iter().enumerate() {
     ///             if i > 0 || self.global {
     ///                 // Double colon `::`
-    ///                 tokens.append(Op::new(':', Spacing::Joint));
-    ///                 tokens.append(Op::new(':', Spacing::Alone));
+    ///                 tokens.append(Punct::new(':', Spacing::Joint));
+    ///                 tokens.append(Punct::new(':', Spacing::Alone));
     ///             }
     ///             segment.to_tokens(tokens);
     ///         }
     ///     }
     /// }
     /// #
     /// # pub struct PathSegment;
     /// #
     /// # impl ToTokens for PathSegment {
-    /// #     fn to_tokens(&self, tokens: &mut Tokens) {
+    /// #     fn to_tokens(&self, tokens: &mut TokenStream) {
     /// #         unimplemented!()
     /// #     }
     /// # }
     /// #
     /// # fn main() {}
     /// ```
-    fn to_tokens(&self, tokens: &mut Tokens);
+    fn to_tokens(&self, tokens: &mut TokenStream);
 
-    /// Convert `self` directly into a `Tokens` object.
+    /// Convert `self` directly into a `TokenStream` object.
     ///
     /// This method is implicitly implemented using `to_tokens`, and acts as a
     /// convenience method for consumers of the `ToTokens` trait.
-    fn into_tokens(self) -> Tokens
+    fn into_token_stream(self) -> TokenStream
     where
         Self: Sized,
     {
-        let mut tokens = Tokens::new();
+        let mut tokens = TokenStream::new();
         self.to_tokens(&mut tokens);
         tokens
     }
 }
 
 impl<'a, T: ?Sized + ToTokens> ToTokens for &'a T {
-    fn to_tokens(&self, tokens: &mut Tokens) {
+    fn to_tokens(&self, tokens: &mut TokenStream) {
         (**self).to_tokens(tokens);
     }
 }
 
 impl<'a, T: ?Sized + ToOwned + ToTokens> ToTokens for Cow<'a, T> {
-    fn to_tokens(&self, tokens: &mut Tokens) {
+    fn to_tokens(&self, tokens: &mut TokenStream) {
         (**self).to_tokens(tokens);
     }
 }
 
 impl<T: ?Sized + ToTokens> ToTokens for Box<T> {
-    fn to_tokens(&self, tokens: &mut Tokens) {
+    fn to_tokens(&self, tokens: &mut TokenStream) {
         (**self).to_tokens(tokens);
     }
 }
 
 impl<T: ToTokens> ToTokens for Option<T> {
-    fn to_tokens(&self, tokens: &mut Tokens) {
+    fn to_tokens(&self, tokens: &mut TokenStream) {
         if let Some(ref t) = *self {
             t.to_tokens(tokens);
         }
     }
 }
 
 impl ToTokens for str {
-    fn to_tokens(&self, tokens: &mut Tokens) {
+    fn to_tokens(&self, tokens: &mut TokenStream) {
         tokens.append(Literal::string(self));
     }
 }
 
 impl ToTokens for String {
-    fn to_tokens(&self, tokens: &mut Tokens) {
+    fn to_tokens(&self, tokens: &mut TokenStream) {
         self.as_str().to_tokens(tokens);
     }
 }
 
 macro_rules! primitive {
     ($($t:ident => $name:ident)*) => ($(
         impl ToTokens for $t {
-            fn to_tokens(&self, tokens: &mut Tokens) {
+            fn to_tokens(&self, tokens: &mut TokenStream) {
                 tokens.append(Literal::$name(*self));
             }
         }
     )*)
 }
 
 primitive! {
     i8 => i8_suffixed
@@ -125,55 +132,59 @@ primitive! {
     u64 => u64_suffixed
     usize => usize_suffixed
 
     f32 => f32_suffixed
     f64 => f64_suffixed
 }
 
 impl ToTokens for char {
-    fn to_tokens(&self, tokens: &mut Tokens) {
+    fn to_tokens(&self, tokens: &mut TokenStream) {
         tokens.append(Literal::character(*self));
     }
 }
 
 impl ToTokens for bool {
-    fn to_tokens(&self, tokens: &mut Tokens) {
+    fn to_tokens(&self, tokens: &mut TokenStream) {
         let word = if *self { "true" } else { "false" };
-        tokens.append(Term::new(word, Span::call_site()));
+        tokens.append(Ident::new(word, Span::call_site()));
     }
 }
 
 impl ToTokens for Group {
-    fn to_tokens(&self, tokens: &mut Tokens) {
+    fn to_tokens(&self, tokens: &mut TokenStream) {
         tokens.append(self.clone());
     }
 }
 
-impl ToTokens for Term {
-    fn to_tokens(&self, tokens: &mut Tokens) {
+impl ToTokens for Ident {
+    fn to_tokens(&self, tokens: &mut TokenStream) {
         tokens.append(self.clone());
     }
 }
 
-impl ToTokens for Op {
-    fn to_tokens(&self, tokens: &mut Tokens) {
+impl ToTokens for Punct {
+    fn to_tokens(&self, tokens: &mut TokenStream) {
         tokens.append(self.clone());
     }
 }
 
 impl ToTokens for Literal {
-    fn to_tokens(&self, tokens: &mut Tokens) {
+    fn to_tokens(&self, tokens: &mut TokenStream) {
         tokens.append(self.clone());
     }
 }
 
 impl ToTokens for TokenTree {
-    fn to_tokens(&self, dst: &mut Tokens) {
+    fn to_tokens(&self, dst: &mut TokenStream) {
         dst.append(self.clone());
     }
 }
 
 impl ToTokens for TokenStream {
-    fn to_tokens(&self, dst: &mut Tokens) {
+    fn to_tokens(&self, dst: &mut TokenStream) {
         dst.append_all(self.clone().into_iter());
     }
+
+    fn into_token_stream(self) -> TokenStream {
+        self
+    }
 }
--- a/third_party/rust/quote/tests/test.rs
+++ b/third_party/rust/quote/tests/test.rs
@@ -1,39 +1,40 @@
 #![cfg_attr(feature = "cargo-clippy", allow(blacklisted_name))]
 
 use std::borrow::Cow;
 
 extern crate proc_macro2;
 #[macro_use]
 extern crate quote;
 
-use proc_macro2::{Span, Term};
+use proc_macro2::{Ident, Span, TokenStream};
+use quote::TokenStreamExt;
 
 struct X;
 
 impl quote::ToTokens for X {
-    fn to_tokens(&self, tokens: &mut quote::Tokens) {
-        tokens.append(Term::new("X", Span::call_site()));
+    fn to_tokens(&self, tokens: &mut TokenStream) {
+        tokens.append(Ident::new("X", Span::call_site()));
     }
 }
 
 #[test]
 fn test_quote_impl() {
     let tokens = quote! {
         impl<'a, T: ToTokens> ToTokens for &'a T {
-            fn to_tokens(&self, tokens: &mut Tokens) {
+            fn to_tokens(&self, tokens: &mut TokenStream) {
                 (**self).to_tokens(tokens)
             }
         }
     };
 
     let expected = concat!(
         "impl < 'a , T : ToTokens > ToTokens for & 'a T { ",
-        "fn to_tokens ( & self , tokens : & mut Tokens ) { ",
+        "fn to_tokens ( & self , tokens : & mut TokenStream ) { ",
         "( * * self ) . to_tokens ( tokens ) ",
         "} ",
         "}"
     );
 
     assert_eq!(expected, tokens.to_string());
 }
 
@@ -177,18 +178,18 @@ fn test_string() {
     let s = "\0 a 'b \" c".to_string();
     let tokens = quote!(#s);
     let expected = "\"\\u{0} a \\'b \\\" c\"";
     assert_eq!(expected, tokens.to_string());
 }
 
 #[test]
 fn test_ident() {
-    let foo = Term::new("Foo", Span::call_site());
-    let bar = Term::new(&format!("Bar{}", 7), Span::call_site());
+    let foo = Ident::new("Foo", Span::call_site());
+    let bar = Ident::new(&format!("Bar{}", 7), Span::call_site());
     let tokens = quote!(struct #foo; enum #bar {});
     let expected = "struct Foo ; enum Bar7 { }";
     assert_eq!(expected, tokens.to_string());
 }
 
 #[test]
 fn test_duplicate() {
     let ch = 'x';
@@ -252,29 +253,29 @@ fn test_empty_quote() {
 fn test_box_str() {
     let b = "str".to_owned().into_boxed_str();
     let tokens = quote! { #b };
     assert_eq!("\"str\"", tokens.to_string());
 }
 
 #[test]
 fn test_cow() {
-    let owned: Cow<Term> = Cow::Owned(Term::new("owned", Span::call_site()));
+    let owned: Cow<Ident> = Cow::Owned(Ident::new("owned", Span::call_site()));
 
-    let ident = Term::new("borrowed", Span::call_site());
+    let ident = Ident::new("borrowed", Span::call_site());
     let borrowed = Cow::Borrowed(&ident);
 
     let tokens = quote! { #owned #borrowed };
     assert_eq!("owned borrowed", tokens.to_string());
 }
 
 #[test]
 fn test_closure() {
-    fn field_i(i: usize) -> Term {
-        Term::new(&format!("__field{}", i), Span::call_site())
+    fn field_i(i: usize) -> Ident {
+        Ident::new(&format!("__field{}", i), Span::call_site())
     }
 
     let fields = (0usize..3)
         .map(field_i as fn(_) -> _)
         .map(|var| quote! { #var });
 
     let tokens = quote! { #(#fields)* };
     assert_eq!("__field0 __field1 __field2", tokens.to_string());
--- a/third_party/rust/serde/.cargo-checksum.json
+++ b/third_party/rust/serde/.cargo-checksum.json
@@ -1,1 +1,1 @@
-{"files":{"Cargo.toml":"cf6e0bc93d4aa0e3a918240d385ecd9f97e2811c3a408d1acd5d31a3da23df15","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb","README.md":"16de77b2d10fac8c70219ba183ad083ae12d4553c6f3213dec39d9936622b371","build.rs":"c441b6b782f4962671f74d86db0442ae8e93aced058dac2d4c17044e1740af4c","src/de/from_primitive.rs":"46b635d939b10163a01739b082cff4b7f2635579920f8160aeadc03f33ad2b51","src/de/ignored_any.rs":"ea47caa53548c394a70bb512afd75b2de7fc9937b33518606282e4084dc70a77","src/de/impls.rs":"072a58dc9c33cc021ecbfc91626d5ea403b263fff0bbfa4516db585719ccc646","src/de/mod.rs":"2df8b3522b03e4bab5a5e0aa6ad45638c8bd21ea048aab5852f8cde5da405268","src/de/utf8.rs":"956b124b7ce98353cb781b56e43a6fed2e67f1389d35b7a468d5be75b1485853","src/de/value.rs":"004a5892e75fdfe015399027429da0ad4ee4ac3db50a980a38339f20bdbdd8e3","src/export.rs":"4e3ed8aa2b0e5d9c18f462183dff7fa4772a30e88e6b3cc0fb9712282ecbe0c5","src/lib.rs":"91df096c81fd3a478d337d29249d0f25eedb872ea00a261eb993df1897386fff","src/macros.rs":"a563019118ab3b1d408fb5fb9f2f80ec64cb53fe7af0fc0e0f35c2cf503b92f8","src/private/de.rs":"283c690c87db77f5adc6d03d62a9731a77b2c79a4d10ec5884d7872d80d573d0","src/private/macros.rs":"6861a4f332ea24d0ed5db1c28fe3105d2716523902f045c0bbbd439ebf9e44de","src/private/mod.rs":"0c774d86042cefdb447857070a1d4d2c0b9f519a7f5db588a1e7fcc16ca5a511","src/private/ser.rs":"189a091e1e77354c7859f34dc564879ad0116bb9fc340b55c3a718ae3126b36a","src/ser/impls.rs":"d5434a4eed1d82b040dc8a73a566a3bc4c085d99a20585654057461088fe0bb8","src/ser/impossible.rs":"91da408362284ec567b2316050900251ff66a1134413963720154fb70f3919c0","src/ser/mod.rs":"cc6c6155139df6e7270d880ad9e5aafb60542a6e0c2230620e0e6315858f82be"},"package":"34e9df8efbe7a2c12ceec1fc8744d56ae3374d8ae325f4a0028949d16433d554"}
\ No newline at end of file
+{"files":{"Cargo.toml":"844ca61f123bafc3da9cf0409d3425f20d3796e38607e88f33fe328c277589e9","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb","README.md":"fe0d3aa328d0e26c76f646c45197bc8c518257d31f3640baed096915b9bdf642","build.rs":"8a7c78d54d7a36e31b21014a9818a3847d6e4e6ee439ecc1d34e6fdef3927ae7","crates-io.md":"4de7b932987feadedd392855d166d0346e8f405190e8085a2e2f84c936a68959","src/de/from_primitive.rs":"3cf03cb47d884ff32555673831d79de8f6bf0c58a22783d73077d1f694c1494d","src/de/ignored_any.rs":"ea47caa53548c394a70bb512afd75b2de7fc9937b33518606282e4084dc70a77","src/de/impls.rs":"19fe62cd61cf74d58eebcec1ad7f96aa5f0824d0173c214d9b0784affd00ab35","src/de/mod.rs":"31466428748f808de1b7f1940047b5c46474604745fc898f39eee182075204aa","src/de/utf8.rs":"956b124b7ce98353cb781b56e43a6fed2e67f1389d35b7a468d5be75b1485853","src/de/value.rs":"5883ea09fb61c30fcb7422a000688481f4fae4c0980ca4618f8b5747b573d545","src/export.rs":"4e3ed8aa2b0e5d9c18f462183dff7fa4772a30e88e6b3cc0fb9712282ecbe0c5","src/integer128.rs":"1bf9e18047a4a3f95a43b8d253e4f844b6be62e71684e47d980582462bce4223","src/lib.rs":"7b24f7fff89cc5122ab1a128285425ad8c287598bbd973a937aa20c5340cacf3","src/macros.rs":"bcffeb59dd91a23368f0c0e5cca5bdbf9dedc7850162c1d571b5d2902a73a309","src/private/de.rs":"3b2dc579aea37c8f231ed2228a584f146f71799f393547583cc7ea182ec551e6","src/private/macros.rs":"6861a4f332ea24d0ed5db1c28fe3105d2716523902f045c0bbbd439ebf9e44de","src/private/mod.rs":"0c774d86042cefdb447857070a1d4d2c0b9f519a7f5db588a1e7fcc16ca5a511","src/private/ser.rs":"56b61986a7c57d37d65d763cb69119889a3dc06eb27a9e95cd5e758acf0e4535","src/ser/impls.rs":"3735a8620dd0e7ff33e33c15f844ecb76207d5101389bf45abca732bd48294eb","src/ser/impossible.rs":"91da408362284ec567b2316050900251ff66a1134413963720154fb70f3919c0","src/ser/mod.rs":"5a1c38bfb1898e7fd41f8f5b53c9fc952edb66d23de1356941050314f37504a0"},"package":"e9a2d9a9ac5120e0f768801
ca2b58ad6eec929dc9d1d616c162f208869c2ce95"}
\ No newline at end of file
--- a/third_party/rust/serde/Cargo.toml
+++ b/third_party/rust/serde/Cargo.toml
@@ -7,24 +7,24 @@
 #
 # If you believe there's an error in this file please file an
 # issue against the rust-lang/cargo repository. If you're
 # editing this file be aware that the upstream Cargo.toml
 # will likely look very different (and much more reasonable)
 
 [package]
 name = "serde"
-version = "1.0.58"
+version = "1.0.66"
 authors = ["Erick Tryzelaar <erick.tryzelaar@gmail.com>", "David Tolnay <dtolnay@gmail.com>"]
 build = "build.rs"
-include = ["Cargo.toml", "build.rs", "src/**/*.rs", "README.md", "LICENSE-APACHE", "LICENSE-MIT"]
+include = ["Cargo.toml", "build.rs", "src/**/*.rs", "crates-io.md", "README.md", "LICENSE-APACHE", "LICENSE-MIT"]
 description = "A generic serialization/deserialization framework"
 homepage = "https://serde.rs"
 documentation = "https://docs.serde.rs/serde/"
-readme = "README.md"
+readme = "crates-io.md"
 keywords = ["serde", "serialization", "no_std"]
 categories = ["encoding"]
 license = "MIT/Apache-2.0"
 repository = "https://github.com/serde-rs/serde"
 [dependencies.serde_derive]
 version = "1.0"
 optional = true
 [dev-dependencies.serde_derive]
--- a/third_party/rust/serde/README.md
+++ b/third_party/rust/serde/README.md
@@ -20,17 +20,17 @@ You may be looking for:
 - [API documentation](https://docs.serde.rs/serde/)
 - [Release notes](https://github.com/serde-rs/serde/releases)
 
 ## Serde in action
 
 <details>
 <summary>
 Click to show Cargo.toml.
-<a href="http://play.integer32.com/?gist=9003c5b88c1f4989941925d7190c6eec" target="_blank">Run this code in the playground.</a>
+<a href="https://play.rust-lang.org/?gist=9003c5b88c1f4989941925d7190c6eec" target="_blank">Run this code in the playground.</a>
 </summary>
 
 ```toml
 [dependencies]
 
 # The core APIs, including the Serialize and Deserialize traits. Always
 # required when using Serde.
 serde = "1.0"
--- a/third_party/rust/serde/build.rs
+++ b/third_party/rust/serde/build.rs
@@ -1,41 +1,78 @@
 use std::env;
 use std::process::Command;
 use std::str::{self, FromStr};
 
+// The rustc-cfg strings below are *not* public API. Please let us know by
+// opening a GitHub issue if your build environment requires some way to enable
+// these cfgs other than by executing our build script.
 fn main() {
-    let rustc = match env::var_os("RUSTC") {
-        Some(rustc) => rustc,
+    let minor = match rustc_minor_version() {
+        Some(minor) => minor,
         None => return,
     };
 
-    let output = match Command::new(rustc).arg("--version").output() {
-        Ok(output) => output,
-        Err(_) => return,
-    };
-
-    let version = match str::from_utf8(&output.stdout) {
-        Ok(version) => version,
-        Err(_) => return,
-    };
-
-    let mut pieces = version.split('.');
-    if pieces.next() != Some("rustc 1") {
-        return;
+    // CString::into_boxed_c_str stabilized in Rust 1.20:
+    // https://doc.rust-lang.org/std/ffi/struct.CString.html#method.into_boxed_c_str
+    if minor >= 20 {
+        println!("cargo:rustc-cfg=de_boxed_c_str");
     }
 
-    let next = match pieces.next() {
-        Some(next) => next,
-        None => return,
-    };
+    // From<Box<T>> for Rc<T> / Arc<T> stabilized in Rust 1.21:
+    // https://doc.rust-lang.org/std/rc/struct.Rc.html#impl-From<Box<T>>
+    // https://doc.rust-lang.org/std/sync/struct.Arc.html#impl-From<Box<T>>
+    if minor >= 21 {
+        println!("cargo:rustc-cfg=de_rc_dst");
+    }
 
-    let minor = match u32::from_str(next) {
-        Ok(minor) => minor,
-        Err(_) => return,
-    };
+    // Duration available in core since Rust 1.25:
+    // https://blog.rust-lang.org/2018/03/29/Rust-1.25.html#library-stabilizations
+    if minor >= 25 {
+        println!("cargo:rustc-cfg=core_duration");
+    }
 
     // 128-bit integers stabilized in Rust 1.26:
     // https://blog.rust-lang.org/2018/05/10/Rust-1.26.html
     if minor >= 26 {
         println!("cargo:rustc-cfg=integer128");
     }
+
+    // Non-zero integers stabilized in Rust 1.28:
+    // https://github.com/rust-lang/rust/pull/50808
+    if minor >= 28 {
+        println!("cargo:rustc-cfg=num_nonzero");
+    }
 }
+
+fn rustc_minor_version() -> Option<u32> {
+    let rustc = match env::var_os("RUSTC") {
+        Some(rustc) => rustc,
+        None => return None,
+    };
+
+    let output = match Command::new(rustc).arg("--version").output() {
+        Ok(output) => output,
+        Err(_) => return None,
+    };
+
+    let version = match str::from_utf8(&output.stdout) {
+        Ok(version) => version,
+        Err(_) => return None,
+    };
+
+    // Temporary workaround to support the old 1.26-dev compiler on docs.rs.
+    if version.contains("0eb87c9bf") {
+        return Some(25);
+    }
+
+    let mut pieces = version.split('.');
+    if pieces.next() != Some("rustc 1") {
+        return None;
+    }
+
+    let next = match pieces.next() {
+        Some(next) => next,
+        None => return None,
+    };
+
+    u32::from_str(next).ok()
+}
new file mode 100644
--- /dev/null
+++ b/third_party/rust/serde/crates-io.md
@@ -0,0 +1,55 @@
+<!-- Serde readme rendered on crates.io -->
+
+**Serde is a framework for *ser*ializing and *de*serializing Rust data structures efficiently and generically.**
+
+---
+
+You may be looking for:
+
+- [An overview of Serde](https://serde.rs/)
+- [Data formats supported by Serde](https://serde.rs/#data-formats)
+- [Setting up `#[derive(Serialize, Deserialize)]`](https://serde.rs/codegen.html)
+- [Examples](https://serde.rs/examples.html)
+- [API documentation](https://docs.serde.rs/serde/)
+- [Release notes](https://github.com/serde-rs/serde/releases)
+
+## Serde in action
+
+```rust
+#[macro_use]
+extern crate serde_derive;
+
+extern crate serde;
+extern crate serde_json;
+
+#[derive(Serialize, Deserialize, Debug)]
+struct Point {
+    x: i32,
+    y: i32,
+}
+
+fn main() {
+    let point = Point { x: 1, y: 2 };
+
+    // Convert the Point to a JSON string.
+    let serialized = serde_json::to_string(&point).unwrap();
+
+    // Prints serialized = {"x":1,"y":2}
+    println!("serialized = {}", serialized);
+
+    // Convert the JSON string back to a Point.
+    let deserialized: Point = serde_json::from_str(&serialized).unwrap();
+
+    // Prints deserialized = Point { x: 1, y: 2 }
+    println!("deserialized = {:?}", deserialized);
+}
+```
+
+## Getting help
+
+Serde developers live in the #serde channel on
+[`irc.mozilla.org`](https://wiki.mozilla.org/IRC). The #rust channel is also a
+good resource with generally faster response time but less specific knowledge
+about Serde. If IRC is not your thing or you don't get a good response, we are
+happy to respond to [GitHub issues](https://github.com/serde-rs/serde/issues/new)
+as well.
--- a/third_party/rust/serde/src/de/from_primitive.rs
+++ b/third_party/rust/serde/src/de/from_primitive.rs
@@ -173,8 +173,96 @@ impl_from_primitive_for_int!(i32);
 impl_from_primitive_for_int!(i64);
 impl_from_primitive_for_uint!(usize);
 impl_from_primitive_for_uint!(u8);
 impl_from_primitive_for_uint!(u16);
 impl_from_primitive_for_uint!(u32);
 impl_from_primitive_for_uint!(u64);
 impl_from_primitive_for_float!(f32);
 impl_from_primitive_for_float!(f64);
+
+serde_if_integer128! {
+    impl FromPrimitive for i128 {
+        #[inline]
+        fn from_i8(n: i8) -> Option<Self> {
+            Some(n as i128)
+        }
+        #[inline]
+        fn from_i16(n: i16) -> Option<Self> {
+            Some(n as i128)
+        }
+        #[inline]
+        fn from_i32(n: i32) -> Option<Self> {
+            Some(n as i128)
+        }
+        #[inline]
+        fn from_i64(n: i64) -> Option<Self> {
+            Some(n as i128)
+        }
+        #[inline]
+        fn from_u8(n: u8) -> Option<Self> {
+            Some(n as i128)
+        }
+        #[inline]
+        fn from_u16(n: u16) -> Option<Self> {
+            Some(n as i128)
+        }
+        #[inline]
+        fn from_u32(n: u32) -> Option<Self> {
+            Some(n as i128)
+        }
+        #[inline]
+        fn from_u64(n: u64) -> Option<Self> {
+            Some(n as i128)
+        }
+    }
+
+    impl FromPrimitive for u128 {
+        #[inline]
+        fn from_i8(n: i8) -> Option<Self> {
+            if n >= 0 {
+                Some(n as u128)
+            } else {
+                None
+            }
+        }
+        #[inline]
+        fn from_i16(n: i16) -> Option<Self> {
+            if n >= 0 {
+                Some(n as u128)
+            } else {
+                None
+            }
+        }
+        #[inline]
+        fn from_i32(n: i32) -> Option<Self> {
+            if n >= 0 {
+                Some(n as u128)
+            } else {
+                None
+            }
+        }
+        #[inline]
+        fn from_i64(n: i64) -> Option<Self> {
+            if n >= 0 {
+                Some(n as u128)
+            } else {
+                None
+            }
+        }
+        #[inline]
+        fn from_u8(n: u8) -> Option<Self> {
+            Some(n as u128)
+        }
+        #[inline]
+        fn from_u16(n: u16) -> Option<Self> {
+            Some(n as u128)
+        }
+        #[inline]
+        fn from_u32(n: u32) -> Option<Self> {
+            Some(n as u128)
+        }
+        #[inline]
+        fn from_u64(n: u64) -> Option<Self> {
+            Some(n as u128)
+        }
+    }
+}
--- a/third_party/rust/serde/src/de/impls.rs
+++ b/third_party/rust/serde/src/de/impls.rs
@@ -7,17 +7,17 @@
 // except according to those terms.
 
 use lib::*;
 
 use de::{
     Deserialize, Deserializer, EnumAccess, Error, SeqAccess, Unexpected, VariantAccess, Visitor,
 };
 
-#[cfg(any(feature = "std", feature = "alloc"))]
+#[cfg(any(core_duration, feature = "std", feature = "alloc"))]
 use de::MapAccess;
 
 use de::from_primitive::FromPrimitive;
 use private::de::InPlaceSeed;
 
 #[cfg(any(feature = "std", feature = "alloc"))]
 use private::de::size_hint;
 
@@ -44,16 +44,26 @@ impl<'de> Deserialize<'de> for () {
     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
     where
         D: Deserializer<'de>,
     {
         deserializer.deserialize_unit(UnitVisitor)
     }
 }
 
+#[cfg(feature = "unstable")]
+impl<'de> Deserialize<'de> for ! {
+    fn deserialize<D>(_deserializer: D) -> Result<Self, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        Err(Error::custom("cannot deserialize `!`"))
+    }
+}
+
 ////////////////////////////////////////////////////////////////////////////////
 
 struct BoolVisitor;
 
 impl<'de> Visitor<'de> for BoolVisitor {
     type Value = bool;
 
     fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
@@ -161,16 +171,102 @@ impl_deserialize_num!(u8, deserialize_u8
 impl_deserialize_num!(u16, deserialize_u16, integer);
 impl_deserialize_num!(u32, deserialize_u32, integer);
 impl_deserialize_num!(u64, deserialize_u64, integer);
 impl_deserialize_num!(usize, deserialize_u64, integer);
 
 impl_deserialize_num!(f32, deserialize_f32, integer, float);
 impl_deserialize_num!(f64, deserialize_f64, integer, float);
 
+serde_if_integer128! {
+    impl<'de> Deserialize<'de> for i128 {
+        #[inline]
+        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+        where
+            D: Deserializer<'de>,
+        {
+            struct PrimitiveVisitor;
+
+            impl<'de> Visitor<'de> for PrimitiveVisitor {
+                type Value = i128;
+
+                fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+                    formatter.write_str("i128")
+                }
+
+                impl_deserialize_num!(integer i128);
+
+                #[inline]
+                fn visit_i128<E>(self, v: i128) -> Result<Self::Value, E>
+                where
+                    E: Error,
+                {
+                    Ok(v)
+                }
+
+                #[inline]
+                fn visit_u128<E>(self, v: u128) -> Result<Self::Value, E>
+                where
+                    E: Error,
+                {
+                    if v <= i128::max_value() as u128 {
+                        Ok(v as i128)
+                    } else {
+                        Err(Error::invalid_value(Unexpected::Other("u128"), &self))
+                    }
+                }
+            }
+
+            deserializer.deserialize_i128(PrimitiveVisitor)
+        }
+    }
+
+    impl<'de> Deserialize<'de> for u128 {
+        #[inline]
+        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+        where
+            D: Deserializer<'de>,
+        {
+            struct PrimitiveVisitor;
+
+            impl<'de> Visitor<'de> for PrimitiveVisitor {
+                type Value = u128;
+
+                fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+                    formatter.write_str("u128")
+                }
+
+                impl_deserialize_num!(integer u128);
+
+                #[inline]
+                fn visit_i128<E>(self, v: i128) -> Result<Self::Value, E>
+                where
+                    E: Error,
+                {
+                    if v >= 0 {
+                        Ok(v as u128)
+                    } else {
+                        Err(Error::invalid_value(Unexpected::Other("i128"), &self))
+                    }
+                }
+
+                #[inline]
+                fn visit_u128<E>(self, v: u128) -> Result<Self::Value, E>
+                where
+                    E: Error,
+                {
+                    Ok(v)
+                }
+            }
+
+            deserializer.deserialize_u128(PrimitiveVisitor)
+        }
+    }
+}
+
 ////////////////////////////////////////////////////////////////////////////////
 
 struct CharVisitor;
 
 impl<'de> Visitor<'de> for CharVisitor {
     type Value = char;
 
     fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
@@ -482,17 +578,17 @@ macro_rules! forwarded_impl {
                 D: Deserializer<'de>,
             {
                 Deserialize::deserialize(deserializer).map($func)
             }
         }
     }
 }
 
-#[cfg(all(feature = "std", feature = "unstable"))]
+#[cfg(all(feature = "std", de_boxed_c_str))]
 forwarded_impl!((), Box<CStr>, CString::into_boxed_c_str);
 
 ////////////////////////////////////////////////////////////////////////////////
 
 struct OptionVisitor<T> {
     marker: PhantomData<T>,
 }
 
@@ -1081,25 +1177,41 @@ map_impl!(
     map,
     HashMap::with_hasher(S::default()),
     HashMap::with_capacity_and_hasher(size_hint::cautious(map.size_hint()), S::default()));
 
 ////////////////////////////////////////////////////////////////////////////////
 
 #[cfg(feature = "std")]
 macro_rules! parse_ip_impl {
-    ($ty:ty; $size:expr) => {
+    ($expecting:tt $ty:ty; $size:tt) => {
         impl<'de> Deserialize<'de> for $ty {
             fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
             where
                 D: Deserializer<'de>,
             {
                 if deserializer.is_human_readable() {
-                    let s = try!(String::deserialize(deserializer));
-                    s.parse().map_err(Error::custom)
+                    struct IpAddrVisitor;
+
+                    impl<'de> Visitor<'de> for IpAddrVisitor {
+                        type Value = $ty;
+
+                        fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+                            formatter.write_str($expecting)
+                        }
+
+                        fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
+                        where
+                            E: Error,
+                        {
+                            s.parse().map_err(Error::custom)
+                        }
+                    }
+
+                    deserializer.deserialize_str(IpAddrVisitor)
                 } else {
                     <[u8; $size]>::deserialize(deserializer).map(<$ty>::from)
                 }
             }
         }
     };
 }
 
@@ -1217,79 +1329,127 @@ macro_rules! deserialize_enum {
 
 #[cfg(feature = "std")]
 impl<'de> Deserialize<'de> for net::IpAddr {
     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
     where
         D: Deserializer<'de>,
     {
         if deserializer.is_human_readable() {
-            let s = try!(String::deserialize(deserializer));
-            s.parse().map_err(Error::custom)
+            struct IpAddrVisitor;
+
+            impl<'de> Visitor<'de> for IpAddrVisitor {
+                type Value = net::IpAddr;
+
+                fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+                    formatter.write_str("IP address")
+                }
+
+                fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
+                where
+                    E: Error,
+                {
+                    s.parse().map_err(Error::custom)
+                }
+            }
+
+            deserializer.deserialize_str(IpAddrVisitor)
         } else {
             use lib::net::IpAddr;
             deserialize_enum!{
                 IpAddr IpAddrKind (V4; b"V4"; 0, V6; b"V6"; 1)
                 "`V4` or `V6`",
                 deserializer
             }
         }
     }
 }
 
 #[cfg(feature = "std")]
-parse_ip_impl!(net::Ipv4Addr; 4);
+parse_ip_impl!("IPv4 address" net::Ipv4Addr; 4);
 
 #[cfg(feature = "std")]
-parse_ip_impl!(net::Ipv6Addr; 16);
+parse_ip_impl!("IPv6 address" net::Ipv6Addr; 16);
 
 #[cfg(feature = "std")]
 macro_rules! parse_socket_impl {
-    ($ty:ty, $new:expr) => {
+    ($expecting:tt $ty:ty, $new:expr) => {
         impl<'de> Deserialize<'de> for $ty {
             fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
             where
                 D: Deserializer<'de>,
             {
                 if deserializer.is_human_readable() {
-                    let s = try!(String::deserialize(deserializer));
-                    s.parse().map_err(Error::custom)
+                    struct SocketAddrVisitor;
+
+                    impl<'de> Visitor<'de> for SocketAddrVisitor {
+                        type Value = $ty;
+
+                        fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+                            formatter.write_str($expecting)
+                        }
+
+                        fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
+                        where
+                            E: Error,
+                        {
+                            s.parse().map_err(Error::custom)
+                        }
+                    }
+
+                    deserializer.deserialize_str(SocketAddrVisitor)
                 } else {
                     <(_, u16)>::deserialize(deserializer).map(|(ip, port)| $new(ip, port))
                 }
             }
         }
     };
 }
 
 #[cfg(feature = "std")]
 impl<'de> Deserialize<'de> for net::SocketAddr {
     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
     where
         D: Deserializer<'de>,
     {
         if deserializer.is_human_readable() {
-            let s = try!(String::deserialize(deserializer));
-            s.parse().map_err(Error::custom)
+            struct SocketAddrVisitor;
+
+            impl<'de> Visitor<'de> for SocketAddrVisitor {
+                type Value = net::SocketAddr;
+
+                fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+                    formatter.write_str("socket address")
+                }
+
+                fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
+                where
+                    E: Error,
+                {
+                    s.parse().map_err(Error::custom)
+                }
+            }
+
+            deserializer.deserialize_str(SocketAddrVisitor)
         } else {
             use lib::net::SocketAddr;
             deserialize_enum!{
                 SocketAddr SocketAddrKind (V4; b"V4"; 0, V6; b"V6"; 1)
                 "`V4` or `V6`",
                 deserializer
             }
         }
     }
 }
 
 #[cfg(feature = "std")]
-parse_socket_impl!(net::SocketAddrV4, net::SocketAddrV4::new);
+parse_socket_impl!("IPv4 socket address" net::SocketAddrV4, net::SocketAddrV4::new);
 
 #[cfg(feature = "std")]
-parse_socket_impl!(net::SocketAddrV6, |ip, port| net::SocketAddrV6::new(
+parse_socket_impl!("IPv6 socket address" net::SocketAddrV6, |ip, port| net::SocketAddrV6::new(
     ip, port, 0, 0
 ));
 
 ////////////////////////////////////////////////////////////////////////////////
 
 #[cfg(feature = "std")]
 struct PathVisitor;
 
@@ -1406,17 +1566,18 @@ impl<'de> Visitor<'de> for OsStringVisit
     #[cfg(windows)]
     fn visit_enum<A>(self, data: A) -> Result<Self::Value, A::Error>
     where
         A: EnumAccess<'de>,
     {
         use std::os::windows::ffi::OsStringExt;
 
         match try!(data.variant()) {
-            (OsStringKind::Windows, v) => v.newtype_variant::<Vec<u16>>()
+            (OsStringKind::Windows, v) => v
+                .newtype_variant::<Vec<u16>>()
                 .map(|vec| OsString::from_wide(&vec)),
             (OsStringKind::Unix, _) => Err(Error::custom(
                 "cannot deserialize Unix OS string on Windows",
             )),
         }
     }
 }
 
@@ -1436,29 +1597,29 @@ impl<'de> Deserialize<'de> for OsString 
 forwarded_impl!((T), Box<T>, Box::new);
 
 #[cfg(any(feature = "std", feature = "alloc"))]
 forwarded_impl!((T), Box<[T]>, Vec::into_boxed_slice);
 
 #[cfg(any(feature = "std", feature = "alloc"))]
 forwarded_impl!((), Box<str>, String::into_boxed_str);
 
-#[cfg(all(not(feature = "unstable"), feature = "rc", any(feature = "std", feature = "alloc")))]
+#[cfg(all(not(de_rc_dst), feature = "rc", any(feature = "std", feature = "alloc")))]
 forwarded_impl! {
     /// This impl requires the [`"rc"`] Cargo feature of Serde.
     ///
     /// Deserializing a data structure containing `Arc` will not attempt to
     /// deduplicate `Arc` references to the same data. Every deserialized `Arc`
     /// will end up with a strong count of 1.
     ///
     /// [`"rc"`]: https://serde.rs/feature-flags.html#-features-rc
     (T), Arc<T>, Arc::new
 }
 
-#[cfg(all(not(feature = "unstable"), feature = "rc", any(feature = "std", feature = "alloc")))]
+#[cfg(all(not(de_rc_dst), feature = "rc", any(feature = "std", feature = "alloc")))]
 forwarded_impl! {
     /// This impl requires the [`"rc"`] Cargo feature of Serde.
     ///
     /// Deserializing a data structure containing `Rc` will not attempt to
     /// deduplicate `Rc` references to the same data. Every deserialized `Rc`
     /// will end up with a strong count of 1.
     ///
     /// [`"rc"`]: https://serde.rs/feature-flags.html#-features-rc
@@ -1515,17 +1676,17 @@ where
     {
         try!(Option::<T>::deserialize(deserializer));
         Ok(ArcWeak::new())
     }
 }
 
 ////////////////////////////////////////////////////////////////////////////////
 
-#[cfg(all(feature = "unstable", feature = "rc", any(feature = "std", feature = "alloc")))]
+#[cfg(all(de_rc_dst, feature = "rc", any(feature = "std", feature = "alloc")))]
 macro_rules! box_forwarded_impl {
     (
         $(#[doc = $doc:tt])*
         $t:ident
     ) => {
         $(#[doc = $doc])*
         impl<'de, T: ?Sized> Deserialize<'de> for $t<T>
         where
@@ -1536,29 +1697,29 @@ macro_rules! box_forwarded_impl {
                 D: Deserializer<'de>,
             {
                 Box::deserialize(deserializer).map(Into::into)
             }
         }
     };
 }
 
-#[cfg(all(feature = "unstable", feature = "rc", any(feature = "std", feature = "alloc")))]
+#[cfg(all(de_rc_dst, feature = "rc", any(feature = "std", feature = "alloc")))]
 box_forwarded_impl! {
     /// This impl requires the [`"rc"`] Cargo feature of Serde.
     ///
     /// Deserializing a data structure containing `Rc` will not attempt to
     /// deduplicate `Rc` references to the same data. Every deserialized `Rc`
     /// will end up with a strong count of 1.
     ///
     /// [`"rc"`]: https://serde.rs/feature-flags.html#-features-rc
     Rc
 }
 
-#[cfg(all(feature = "unstable", feature = "rc", any(feature = "std", feature = "alloc")))]
+#[cfg(all(de_rc_dst, feature = "rc", any(feature = "std", feature = "alloc")))]
 box_forwarded_impl! {
     /// This impl requires the [`"rc"`] Cargo feature of Serde.
     ///
     /// Deserializing a data structure containing `Arc` will not attempt to
     /// deduplicate `Arc` references to the same data. Every deserialized `Arc`
     /// will end up with a strong count of 1.
     ///
     /// [`"rc"`]: https://serde.rs/feature-flags.html#-features-rc
@@ -1592,17 +1753,17 @@ forwarded_impl!((T), RwLock<T>, RwLock::
 // This is a cleaned-up version of the impl generated by:
 //
 //     #[derive(Deserialize)]
 //     #[serde(deny_unknown_fields)]
 //     struct Duration {
 //         secs: u64,
 //         nanos: u32,
 //     }
-#[cfg(feature = "std")]
+#[cfg(any(core_duration, feature = "std"))]
 impl<'de> Deserialize<'de> for Duration {
     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
     where
         D: Deserializer<'de>,
     {
         // If this were outside of the serde crate, it would just use:
         //
         //    #[derive(Deserialize)]
@@ -1640,17 +1801,17 @@ impl<'de> Deserialize<'de> for Duration 
                     fn visit_bytes<E>(self, value: &[u8]) -> Result<Self::Value, E>
                     where
                         E: Error,
                     {
                         match value {
                             b"secs" => Ok(Field::Secs),
                             b"nanos" => Ok(Field::Nanos),
                             _ => {
-                                let value = String::from_utf8_lossy(value);
+                                let value = ::export::from_utf8_lossy(value);
                                 Err(Error::unknown_field(&value, FIELDS))
                             }
                         }
                     }
                 }
 
                 deserializer.deserialize_identifier(FieldVisitor)
             }
@@ -2001,26 +2162,26 @@ where
             },
         )
     }
 }
 
 ////////////////////////////////////////////////////////////////////////////////
 
 macro_rules! nonzero_integers {
-    ( $( $T: ty, )+ ) => {
+    ( $( $T: ident, )+ ) => {
         $(
-            #[cfg(feature = "unstable")]
-            impl<'de> Deserialize<'de> for $T {
+            #[cfg(num_nonzero)]
+            impl<'de> Deserialize<'de> for num::$T {
                 fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
                 where
                     D: Deserializer<'de>,
                 {
                     let value = try!(Deserialize::deserialize(deserializer));
-                    match <$T>::new(value) {
+                    match <num::$T>::new(value) {
                         Some(nonzero) => Ok(nonzero),
                         None => Err(Error::custom("expected a non-zero value")),
                     }
                 }
             }
         )+
     };
 }
--- a/third_party/rust/serde/src/de/mod.rs
+++ b/third_party/rust/serde/src/de/mod.rs
@@ -47,31 +47,31 @@
 //! # Implementations of Deserialize provided by Serde
 //!
 //! This is a slightly different set of types than what is supported for
 //! serialization. Some types can be serialized by Serde but not deserialized.
 //! One example is `OsStr`.
 //!
 //!  - **Primitive types**:
 //!    - bool
-//!    - i8, i16, i32, i64, isize
-//!    - u8, u16, u32, u64, usize
+//!    - i8, i16, i32, i64, i128, isize
+//!    - u8, u16, u32, u64, u128, usize
 //!    - f32, f64
 //!    - char
 //!  - **Compound types**:
-//!    - [T; 0] through [T; 32]
+//!    - \[T; 0\] through \[T; 32\]
 //!    - tuples up to size 16
 //!  - **Common standard library types**:
 //!    - String
 //!    - Option\<T\>
 //!    - Result\<T, E\>
 //!    - PhantomData\<T\>
 //!  - **Wrapper types**:
 //!    - Box\<T\>
-//!    - Box\<[T]\>
+//!    - Box\<\[T\]\>
 //!    - Box\<str\>
 //!    - Rc\<T\>
 //!    - Arc\<T\>
 //!    - Cow\<'a, T\>
 //!    - Cell\<T\>
 //!    - RefCell\<T\>
 //!    - Mutex\<T\>
 //!    - RwLock\<T\>
@@ -81,28 +81,29 @@
 //!    - BinaryHeap\<T\>
 //!    - HashMap\<K, V, H\>
 //!    - HashSet\<T, H\>
 //!    - LinkedList\<T\>
 //!    - VecDeque\<T\>
 //!    - Vec\<T\>
 //!  - **Zero-copy types**:
 //!    - &str
-//!    - &[u8]
+//!    - &\[u8\]
 //!  - **FFI types**:
 //!    - CString
 //!    - Box\<CStr\>
 //!    - OsString
 //!  - **Miscellaneous standard library types**:
 //!    - Duration
 //!    - SystemTime
 //!    - Path
 //!    - PathBuf
 //!    - Range\<T\>
-//!    - num::NonZero* (unstable)
+//!    - num::NonZero*
+//!    - `!` *(unstable)*
 //!  - **Net types**:
 //!    - IpAddr
 //!    - Ipv4Addr
 //!    - Ipv6Addr
 //!    - SocketAddr
 //!    - SocketAddrV4
 //!    - SocketAddrV6
 //!
@@ -143,16 +144,23 @@ macro_rules! declare_error_trait {
         /// general-purpose deserialization errors as well as errors specific to the
         /// particular deserialization format. For example the `Error` type of
         /// `serde_json` can represent errors like an invalid JSON escape sequence or an
         /// unterminated string literal, in addition to the error cases that are part of
         /// this trait.
         ///
         /// Most deserializers should only need to provide the `Error::custom` method
         /// and inherit the default behavior for the other methods.
+        ///
+        /// # Example implementation
+        ///
+        /// The [example data format] presented on the website shows an error
+        /// type appropriate for a basic JSON data format.
+        ///
+        /// [example data format]: https://serde.rs/data-format.html
         pub trait Error: Sized $(+ $($supertrait)::+)* {
             /// Raised when there is general error when deserializing a type.
             ///
             /// The message should not be capitalized and should not end with a period.
             ///
             /// ```rust
             /// # use std::str::FromStr;
             /// #
@@ -499,16 +507,24 @@ impl<'a> Display for Expected + 'a {
 /// Third-party crates may provide `Deserialize` implementations for types that
 /// they expose. For example the `linked-hash-map` crate provides a
 /// `LinkedHashMap<K, V>` type that is deserializable by Serde because the crate
 /// provides an implementation of `Deserialize` for it.
 ///
 /// [de]: https://docs.serde.rs/serde/de/index.html
 /// [codegen]: https://serde.rs/codegen.html
 /// [impl-deserialize]: https://serde.rs/impl-deserialize.html
+///
+/// # Lifetime
+///
+/// The `'de` lifetime of this trait is the lifetime of data that may be
+/// borrowed by `Self` when deserialized. See the page [Understanding
+/// deserializer lifetimes] for a more detailed explanation of these lifetimes.
+///
+/// [Understanding deserializer lifetimes]: https://serde.rs/lifetimes.html
 pub trait Deserialize<'de>: Sized {
     /// Deserialize this value from the given Serde deserializer.
     ///
     /// See the [Implementing `Deserialize`][impl-deserialize] section of the
     /// manual for more information about how to implement this method.
     ///
     /// [impl-deserialize]: https://serde.rs/impl-deserialize.html
     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
@@ -563,16 +579,24 @@ pub trait Deserialize<'de>: Sized {
 ///     T: Deserialize<'a>;
 ///
 /// fn from_reader<R, T>(rdr: R) -> Result<T>
 /// where
 ///     R: Read,
 ///     T: DeserializeOwned;
 /// # }
 /// ```
+///
+/// # Lifetime
+///
+/// The relationship between `Deserialize` and `DeserializeOwned` in trait
+/// bounds is explained in more detail on the page [Understanding deserializer
+/// lifetimes].
+///
+/// [Understanding deserializer lifetimes]: https://serde.rs/lifetimes.html
 pub trait DeserializeOwned: for<'de> Deserialize<'de> {}
 impl<T> DeserializeOwned for T
 where
     T: for<'de> Deserialize<'de>,
 {
 }
 
 /// `DeserializeSeed` is the stateful form of the `Deserialize` trait. If you
@@ -613,16 +637,24 @@ where
 /// #     unimplemented!()
 /// # }
 /// ```
 ///
 /// In practice the majority of deserialization is stateless. An API expecting a
 /// seed can be appeased by passing `std::marker::PhantomData` as a seed in the
 /// case of stateless deserialization.
 ///
+/// # Lifetime
+///
+/// The `'de` lifetime of this trait is the lifetime of data that may be
+/// borrowed by `Self::Value` when deserialized. See the page [Understanding
+/// deserializer lifetimes] for a more detailed explanation of these lifetimes.
+///
+/// [Understanding deserializer lifetimes]: https://serde.rs/lifetimes.html
+///
 /// # Example
 ///
 /// Suppose we have JSON that looks like `[[1, 2], [3, 4, 5], [6]]` and we need
 /// to deserialize it into a flat representation like `vec![1, 2, 3, 4, 5, 6]`.
 /// Allocating a brand new `Vec<T>` for each subarray would be slow. Instead we
 /// would like to allocate a single `Vec<T>` and then deserialize each subarray
 /// into it. This requires stateful deserialization using the `DeserializeSeed`
 /// trait.
@@ -751,38 +783,38 @@ where
     }
 }
 
 ////////////////////////////////////////////////////////////////////////////////
 
 /// A **data format** that can deserialize any data structure supported by
 /// Serde.
 ///
-/// The role of this trait is to define the deserialization half of the Serde
-/// data model, which is a way to categorize every Rust data type into one of 27
-/// possible types. Each method of the `Serializer` trait corresponds to one of
-/// the types of the data model.
+/// The role of this trait is to define the deserialization half of the [Serde
+/// data model], which is a way to categorize every Rust data type into one of
+/// 29 possible types. Each method of the `Serializer` trait corresponds to one
+/// of the types of the data model.
 ///
 /// Implementations of `Deserialize` map themselves into this data model by
 /// passing to the `Deserializer` a `Visitor` implementation that can receive
 /// these various types.
 ///
 /// The types that make up the Serde data model are:
 ///
-///  - **12 primitive types**
+///  - **14 primitive types**
 ///    - bool
-///    - i8, i16, i32, i64
-///    - u8, u16, u32, u64
+///    - i8, i16, i32, i64, i128
+///    - u8, u16, u32, u64, u128
 ///    - f32, f64
 ///    - char
 ///  - **string**
 ///    - UTF-8 bytes with a length and no null terminator.
 ///    - When serializing, all strings are handled equally. When deserializing,
 ///      there are three flavors of strings: transient, owned, and borrowed.
-///  - **byte array** - [u8]
+///  - **byte array** - \[u8\]
 ///    - Similar to strings, during deserialization byte arrays can be transient,
 ///      owned, or borrowed.
 ///  - **option**
 ///    - Either none or some value.
 ///  - **unit**
 ///    - The type of `()` in Rust. It represents an anonymous value containing no
 ///      data.
 ///  - **unit_struct**
@@ -836,16 +868,33 @@ where
 ///    able to deserialize something like `serde_json::Value` which relies on
 ///    `Deserializer::deserialize_any`.
 ///
 /// When implementing `Deserialize`, you should avoid relying on
 /// `Deserializer::deserialize_any` unless you need to be told by the Deserializer
 /// what type is in the input. Know that relying on `Deserializer::deserialize_any`
 /// means your data type will be able to deserialize from self-describing
 /// formats only, ruling out Bincode and many others.
+///
+/// [Serde data model]: https://serde.rs/data-model.html
+///
+/// # Lifetime
+///
+/// The `'de` lifetime of this trait is the lifetime of data that may be
+/// borrowed from the input when deserializing. See the page [Understanding
+/// deserializer lifetimes] for a more detailed explanation of these lifetimes.
+///
+/// [Understanding deserializer lifetimes]: https://serde.rs/lifetimes.html
+///
+/// # Example implementation
+///
+/// The [example data format] presented on the website contains example code for
+/// a basic JSON `Deserializer`.
+///
+/// [example data format]: https://serde.rs/data-format.html
 pub trait Deserializer<'de>: Sized {
     /// The error type that can be returned if some error occurs during
     /// deserialization.
     type Error: Error;
 
     /// Require the `Deserializer` to figure out how to drive the visitor based
     /// on what data type is in the input.
     ///
@@ -879,16 +928,30 @@ pub trait Deserializer<'de>: Sized {
     where
         V: Visitor<'de>;
 
     /// Hint that the `Deserialize` type is expecting an `i64` value.
     fn deserialize_i64<V>(self, visitor: V) -> Result<V::Value, Self::Error>
     where
         V: Visitor<'de>;
 
+    serde_if_integer128! {
+        /// Hint that the `Deserialize` type is expecting an `i128` value.
+        ///
+        /// This method is available only on Rust compiler versions >=1.26. The
+        /// default behavior unconditionally returns an error.
+        fn deserialize_i128<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+        where
+            V: Visitor<'de>
+        {
+            let _ = visitor;
+            Err(Error::custom("i128 is not supported"))
+        }
+    }
+
     /// Hint that the `Deserialize` type is expecting a `u8` value.
     fn deserialize_u8<V>(self, visitor: V) -> Result<V::Value, Self::Error>
     where
         V: Visitor<'de>;
 
     /// Hint that the `Deserialize` type is expecting a `u16` value.
     fn deserialize_u16<V>(self, visitor: V) -> Result<V::Value, Self::Error>
     where
@@ -899,16 +962,30 @@ pub trait Deserializer<'de>: Sized {
     where
         V: Visitor<'de>;
 
     /// Hint that the `Deserialize` type is expecting a `u64` value.
     fn deserialize_u64<V>(self, visitor: V) -> Result<V::Value, Self::Error>
     where
         V: Visitor<'de>;
 
+    serde_if_integer128! {
+        /// Hint that the `Deserialize` type is expecting an `u128` value.
+        ///
+        /// This method is available only on Rust compiler versions >=1.26. The
+        /// default behavior unconditionally returns an error.
+        fn deserialize_u128<V>(self, visitor: V) -> Result<V::Value, Self::Error>
+        where
+            V: Visitor<'de>
+        {
+            let _ = visitor;
+            Err(Error::custom("u128 is not supported"))
+        }
+    }
+
     /// Hint that the `Deserialize` type is expecting a `f32` value.
     fn deserialize_f32<V>(self, visitor: V) -> Result<V::Value, Self::Error>
     where
         V: Visitor<'de>;
 
     /// Hint that the `Deserialize` type is expecting a `f64` value.
     fn deserialize_f64<V>(self, visitor: V) -> Result<V::Value, Self::Error>
     where
@@ -1131,16 +1208,26 @@ pub trait Deserializer<'de>: Sized {
         true
     }
 }
 
 ////////////////////////////////////////////////////////////////////////////////
 
 /// This trait represents a visitor that walks through a deserializer.
 ///
+/// # Lifetime
+///
+/// The `'de` lifetime of this trait is the requirement for lifetime of data
+/// that may be borrowed by `Self::Value`. See the page [Understanding
+/// deserializer lifetimes] for a more detailed explanation of these lifetimes.
+///
+/// [Understanding deserializer lifetimes]: https://serde.rs/lifetimes.html
+///
+/// # Example
+///
 /// ```rust
 /// # use std::fmt;
 /// #
 /// # use serde::de::{self, Unexpected, Visitor};
 /// #
 /// /// A visitor that deserializes a long string - a string containing at least
 /// /// some minimum number of bytes.
 /// struct LongString {
@@ -1245,16 +1332,30 @@ pub trait Visitor<'de>: Sized {
     /// The default implementation fails with a type error.
     fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E>
     where
         E: Error,
     {
         Err(Error::invalid_type(Unexpected::Signed(v), &self))
     }
 
+    serde_if_integer128! {
+        /// The input contains a `i128`.
+        ///
+        /// This method is available only on Rust compiler versions >=1.26. The
+        /// default implementation fails with a type error.
+        fn visit_i128<E>(self, v: i128) -> Result<Self::Value, E>
+        where
+            E: Error,
+        {
+            let _ = v;
+            Err(Error::invalid_type(Unexpected::Other("i128"), &self))
+        }
+    }
+
     /// The input contains a `u8`.
     ///
     /// The default implementation forwards to [`visit_u64`].
     ///
     /// [`visit_u64`]: #method.visit_u64
     fn visit_u8<E>(self, v: u8) -> Result<Self::Value, E>
     where
         E: Error,
@@ -1291,16 +1392,30 @@ pub trait Visitor<'de>: Sized {
     /// The default implementation fails with a type error.
     fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E>
     where
         E: Error,
     {
         Err(Error::invalid_type(Unexpected::Unsigned(v), &self))
     }
 
+    serde_if_integer128! {
+        /// The input contains a `u128`.
+        ///
+        /// This method is available only on Rust compiler versions >=1.26. The
+        /// default implementation fails with a type error.
+        fn visit_u128<E>(self, v: u128) -> Result<Self::Value, E>
+        where
+            E: Error,
+        {
+            let _ = v;
+            Err(Error::invalid_type(Unexpected::Other("u128"), &self))
+        }
+    }
+
     /// The input contains an `f32`.
     ///
     /// The default implementation forwards to [`visit_f64`].
     ///
     /// [`visit_f64`]: #method.visit_f64
     fn visit_f32<E>(self, v: f32) -> Result<Self::Value, E>
     where
         E: Error,
@@ -1539,16 +1654,31 @@ pub trait Visitor<'de>: Sized {
 }
 
 ////////////////////////////////////////////////////////////////////////////////
 
 /// Provides a `Visitor` access to each element of a sequence in the input.
 ///
 /// This is a trait that a `Deserializer` passes to a `Visitor` implementation,
 /// which deserializes each item in a sequence.
+///
+/// # Lifetime
+///
+/// The `'de` lifetime of this trait is the lifetime of data that may be
+/// borrowed by deserialized sequence elements. See the page [Understanding
+/// deserializer lifetimes] for a more detailed explanation of these lifetimes.
+///
+/// [Understanding deserializer lifetimes]: https://serde.rs/lifetimes.html
+///
+/// # Example implementation
+///
+/// The [example data format] presented on the website demonstrates an
+/// implementation of `SeqAccess` for a basic JSON data format.
+///
+/// [example data format]: https://serde.rs/data-format.html
 pub trait SeqAccess<'de> {
     /// The error type that can be returned if some error occurs during
     /// deserialization.
     type Error: Error;
 
     /// This returns `Ok(Some(value))` for the next value in the sequence, or
     /// `Ok(None)` if there are no more remaining items.
     ///
@@ -1606,16 +1736,31 @@ where
     }
 }
 
 ////////////////////////////////////////////////////////////////////////////////
 
 /// Provides a `Visitor` access to each entry of a map in the input.
 ///
 /// This is a trait that a `Deserializer` passes to a `Visitor` implementation.
+///
+/// # Lifetime
+///
+/// The `'de` lifetime of this trait is the lifetime of data that may be
+/// borrowed by deserialized map entries. See the page [Understanding
+/// deserializer lifetimes] for a more detailed explanation of these lifetimes.
+///
+/// [Understanding deserializer lifetimes]: https://serde.rs/lifetimes.html
+///
+/// # Example implementation
+///
+/// The [example data format] presented on the website demonstrates an
+/// implementation of `MapAccess` for a basic JSON data format.
+///
+/// [example data format]: https://serde.rs/data-format.html
 pub trait MapAccess<'de> {
     /// The error type that can be returned if some error occurs during
     /// deserialization.
     type Error: Error;
 
     /// This returns `Ok(Some(key))` for the next key in the map, or `Ok(None)`
     /// if there are no more remaining entries.
     ///
@@ -1783,16 +1928,31 @@ where
 }
 
 ////////////////////////////////////////////////////////////////////////////////
 
 /// Provides a `Visitor` access to the data of an enum in the input.
 ///
 /// `EnumAccess` is created by the `Deserializer` and passed to the
 /// `Visitor` in order to identify which variant of an enum to deserialize.
+///
+/// # Lifetime
+///
+/// The `'de` lifetime of this trait is the lifetime of data that may be
+/// borrowed by the deserialized enum variant. See the page [Understanding
+/// deserializer lifetimes] for a more detailed explanation of these lifetimes.
+///
+/// [Understanding deserializer lifetimes]: https://serde.rs/lifetimes.html
+///
+/// # Example implementation
+///
+/// The [example data format] presented on the website demonstrates an
+/// implementation of `EnumAccess` for a basic JSON data format.
+///
+/// [example data format]: https://serde.rs/data-format.html
 pub trait EnumAccess<'de>: Sized {
     /// The error type that can be returned if some error occurs during
     /// deserialization.
     type Error: Error;
     /// The `Visitor` that will be used to deserialize the content of the enum
     /// variant.
     type Variant: VariantAccess<'de, Error = Self::Error>;
 
@@ -1815,16 +1975,31 @@ pub trait EnumAccess<'de>: Sized {
     {
         self.variant_seed(PhantomData)
     }
 }
 
 /// `VariantAccess` is a visitor that is created by the `Deserializer` and
 /// passed to the `Deserialize` to deserialize the content of a particular enum
 /// variant.
+///
+/// # Lifetime
+///
+/// The `'de` lifetime of this trait is the lifetime of data that may be
+/// borrowed by the deserialized enum variant. See the page [Understanding
+/// deserializer lifetimes] for a more detailed explanation of these lifetimes.
+///
+/// [Understanding deserializer lifetimes]: https://serde.rs/lifetimes.html
+///
+/// # Example implementation
+///
+/// The [example data format] presented on the website demonstrates an
+/// implementation of `VariantAccess` for a basic JSON data format.
+///
+/// [example data format]: https://serde.rs/data-format.html
 pub trait VariantAccess<'de>: Sized {
     /// The error type that can be returned if some error occurs during
     /// deserialization. Must match the error type of our `EnumAccess`.
     type Error: Error;
 
     /// Called when deserializing a variant with no values.
     ///
     /// If the data contains a different type of variant, the following
@@ -2018,16 +2193,26 @@ pub trait VariantAccess<'de>: Sized {
         V: Visitor<'de>;
 }
 
 ////////////////////////////////////////////////////////////////////////////////
 
 /// Converts an existing value into a `Deserializer` from which other values can
 /// be deserialized.
 ///
+/// # Lifetime
+///
+/// The `'de` lifetime of this trait is the lifetime of data that may be
+/// borrowed from the resulting `Deserializer`. See the page [Understanding
+/// deserializer lifetimes] for a more detailed explanation of these lifetimes.
+///
+/// [Understanding deserializer lifetimes]: https://serde.rs/lifetimes.html
+///
+/// # Example
+///
 /// ```rust
 /// #[macro_use]
 /// extern crate serde_derive;
 ///
 /// extern crate serde;
 ///
 /// use std::str::FromStr;
 /// use serde::de::{value, Deserialize, IntoDeserializer};
--- a/third_party/rust/serde/src/de/value.rs
+++ b/third_party/rust/serde/src/de/value.rs
@@ -39,16 +39,32 @@ use lib::*;
 
 use self::private::{First, Second};
 use de::{self, Expected, IntoDeserializer, SeqAccess};
 use private::de::size_hint;
 use ser;
 
 ////////////////////////////////////////////////////////////////////////////////
 
+// For structs that contain a PhantomData. We do not want the trait
+// bound `E: Clone` inferred by derive(Clone).
+macro_rules! impl_copy_clone {
+    ($ty:ident $(<$lifetime:tt>)*) => {
+        impl<$($lifetime,)* E> Copy for $ty<$($lifetime,)* E> {}
+
+        impl<$($lifetime,)* E> Clone for $ty<$($lifetime,)* E> {
+            fn clone(&self) -> Self {
+                *self
+            }
+        }
+    };
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
 /// A minimal representation of all possible errors that can occur using the
 /// `IntoDeserializer` trait.
 #[derive(Clone, Debug, PartialEq)]
 pub struct Error {
     err: ErrorImpl,
 }
 
 #[cfg(any(feature = "std", feature = "alloc"))]
@@ -119,31 +135,33 @@ where
     fn into_deserializer(self) -> UnitDeserializer<E> {
         UnitDeserializer {
             marker: PhantomData,
         }
     }
 }
 
 /// A deserializer holding a `()`.
-#[derive(Clone, Debug)]
+#[derive(Debug)]
 pub struct UnitDeserializer<E> {
     marker: PhantomData<E>,
 }
 
+impl_copy_clone!(UnitDeserializer);
+
 impl<'de, E> de::Deserializer<'de> for UnitDeserializer<E>
 where
     E: de::Error,
 {
     type Error = E;
 
     forward_to_deserialize_any! {
-        bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes
-        byte_buf unit unit_struct newtype_struct seq tuple tuple_struct map
-        struct enum identifier ignored_any
+        bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string
+        bytes byte_buf unit unit_struct newtype_struct seq tuple tuple_struct
+        map struct enum identifier ignored_any
     }
 
     fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
     where
         V: de::Visitor<'de>,
     {
         visitor.visit_unit()
     }
@@ -153,26 +171,70 @@ where
         V: de::Visitor<'de>,
     {
         visitor.visit_none()
     }
 }
 
 ////////////////////////////////////////////////////////////////////////////////
 
+/// A deserializer that cannot be instantiated.
+#[cfg(feature = "unstable")]
+pub struct NeverDeserializer<E> {
+    never: !,
+    marker: PhantomData<E>,
+}
+
+#[cfg(feature = "unstable")]
+impl<'de, E> IntoDeserializer<'de, E> for !
+where
+    E: de::Error,
+{
+    type Deserializer = NeverDeserializer<E>;
+
+    fn into_deserializer(self) -> Self::Deserializer {
+        self
+    }
+}
+
+#[cfg(feature = "unstable")]
+impl<'de, E> de::Deserializer<'de> for NeverDeserializer<E>
+where
+    E: de::Error,
+{
+    type Error = E;
+
+    fn deserialize_any<V>(self, _visitor: V) -> Result<V::Value, Self::Error>
+    where
+        V: de::Visitor<'de>,
+    {
+        self.never
+    }
+
+    forward_to_deserialize_any! {
+        bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string
+        bytes byte_buf option unit unit_struct newtype_struct seq tuple
+        tuple_struct map struct enum identifier ignored_any
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
 macro_rules! primitive_deserializer {
     ($ty:ty, $doc:tt, $name:ident, $method:ident $($cast:tt)*) => {
         #[doc = "A deserializer holding"]
         #[doc = $doc]
-        #[derive(Clone, Debug)]
+        #[derive(Debug)]
         pub struct $name<E> {
             value: $ty,
             marker: PhantomData<E>
         }
 
+        impl_copy_clone!($name);
+
         impl<'de, E> IntoDeserializer<'de, E> for $ty
         where
             E: de::Error,
         {
             type Deserializer = $name<E>;
 
             fn into_deserializer(self) -> $name<E> {
                 $name {
@@ -184,19 +246,19 @@ macro_rules! primitive_deserializer {
 
         impl<'de, E> de::Deserializer<'de> for $name<E>
         where
             E: de::Error,
         {
             type Error = E;
 
             forward_to_deserialize_any! {
-                bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes
-                byte_buf option unit unit_struct newtype_struct seq tuple
-                tuple_struct map struct enum identifier ignored_any
+                bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str
+                string bytes byte_buf option unit unit_struct newtype_struct seq
+                tuple tuple_struct map struct enum identifier ignored_any
             }
 
             fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
             where
                 V: de::Visitor<'de>,
             {
                 visitor.$method(self.value $($cast)*)
             }
@@ -213,23 +275,30 @@ primitive_deserializer!(isize, "an `isiz
 primitive_deserializer!(u8, "a `u8`.", U8Deserializer, visit_u8);
 primitive_deserializer!(u16, "a `u16`.", U16Deserializer, visit_u16);
 primitive_deserializer!(u64, "a `u64`.", U64Deserializer, visit_u64);
 primitive_deserializer!(usize, "a `usize`.", UsizeDeserializer, visit_u64 as u64);
 primitive_deserializer!(f32, "an `f32`.", F32Deserializer, visit_f32);
 primitive_deserializer!(f64, "an `f64`.", F64Deserializer, visit_f64);
 primitive_deserializer!(char, "a `char`.", CharDeserializer, visit_char);
 
+serde_if_integer128! {
+    primitive_deserializer!(i128, "an `i128`.", I128Deserializer, visit_i128);
+    primitive_deserializer!(u128, "a `u128`.", U128Deserializer, visit_u128);
+}
+
 /// A deserializer holding a `u32`.
-#[derive(Clone, Debug)]
+#[derive(Debug)]
 pub struct U32Deserializer<E> {
     value: u32,
     marker: PhantomData<E>,
 }
 
+impl_copy_clone!(U32Deserializer);
+
 impl<'de, E> IntoDeserializer<'de, E> for u32
 where
     E: de::Error,
 {
     type Deserializer = U32Deserializer<E>;
 
     fn into_deserializer(self) -> U32Deserializer<E> {
         U32Deserializer {
@@ -241,19 +310,19 @@ where
 
 impl<'de, E> de::Deserializer<'de> for U32Deserializer<E>
 where
     E: de::Error,
 {
     type Error = E;
 
     forward_to_deserialize_any! {
-        bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes
-        byte_buf option unit unit_struct newtype_struct seq tuple tuple_struct
-        map struct identifier ignored_any
+        bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string
+        bytes byte_buf option unit unit_struct newtype_struct seq tuple
+        tuple_struct map struct identifier ignored_any
     }
 
     fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
     where
         V: de::Visitor<'de>,
     {
         visitor.visit_u32(self.value)
     }
@@ -286,22 +355,24 @@ where
     {
         seed.deserialize(self).map(private::unit_only)
     }
 }
 
 ////////////////////////////////////////////////////////////////////////////////
 
 /// A deserializer holding a `&str`.
-#[derive(Clone, Debug)]
+#[derive(Debug)]
 pub struct StrDeserializer<'a, E> {
     value: &'a str,
     marker: PhantomData<E>,
 }
 
+impl_copy_clone!(StrDeserializer<'de>);
+
 impl<'de, 'a, E> IntoDeserializer<'de, E> for &'a str
 where
     E: de::Error,
 {
     type Deserializer = StrDeserializer<'a, E>;
 
     fn into_deserializer(self) -> StrDeserializer<'a, E> {
         StrDeserializer {
@@ -334,19 +405,19 @@ where
         V: de::Visitor<'de>,
     {
         let _ = name;
         let _ = variants;
         visitor.visit_enum(self)
     }
 
     forward_to_deserialize_any! {
-        bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes
-        byte_buf option unit unit_struct newtype_struct seq tuple tuple_struct
-        map struct identifier ignored_any
+        bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string
+        bytes byte_buf option unit unit_struct newtype_struct seq tuple
+        tuple_struct map struct identifier ignored_any
     }
 }
 
 impl<'de, 'a, E> de::EnumAccess<'de> for StrDeserializer<'a, E>
 where
     E: de::Error,
 {
     type Error = E;
@@ -359,22 +430,24 @@ where
         seed.deserialize(self).map(private::unit_only)
     }
 }
 
 ////////////////////////////////////////////////////////////////////////////////
 
 /// A deserializer holding a `&str` with a lifetime tied to another
 /// deserializer.
-#[derive(Clone, Debug)]
+#[derive(Debug)]
 pub struct BorrowedStrDeserializer<'de, E> {
     value: &'de str,
     marker: PhantomData<E>,
 }
 
+impl_copy_clone!(BorrowedStrDeserializer<'de>);
+
 impl<'de, E> BorrowedStrDeserializer<'de, E> {
     /// Create a new borrowed deserializer from the given string.
     pub fn new(value: &'de str) -> BorrowedStrDeserializer<'de, E> {
         BorrowedStrDeserializer {
             value: value,
             marker: PhantomData,
         }
     }
@@ -403,19 +476,19 @@ where
         V: de::Visitor<'de>,
     {
         let _ = name;
         let _ = variants;
         visitor.visit_enum(self)
     }
 
     forward_to_deserialize_any! {
-        bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes
-        byte_buf option unit unit_struct newtype_struct seq tuple tuple_struct
-        map struct identifier ignored_any
+        bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string
+        bytes byte_buf option unit unit_struct newtype_struct seq tuple
+        tuple_struct map struct identifier ignored_any
     }
 }
 
 impl<'de, E> de::EnumAccess<'de> for BorrowedStrDeserializer<'de, E>
 where
     E: de::Error,
 {
     type Error = E;
@@ -428,23 +501,33 @@ where
         seed.deserialize(self).map(private::unit_only)
     }
 }
 
 ////////////////////////////////////////////////////////////////////////////////
 
 /// A deserializer holding a `String`.
 #[cfg(any(feature = "std", feature = "alloc"))]
-#[derive(Clone, Debug)]
+#[derive(Debug)]
 pub struct StringDeserializer<E> {
     value: String,
     marker: PhantomData<E>,
 }
 
 #[cfg(any(feature = "std", feature = "alloc"))]
+impl<E> Clone for StringDeserializer<E> {
+    fn clone(&self) -> Self {
+        StringDeserializer {
+            value: self.value.clone(),
+            marker: PhantomData,
+        }
+    }
+}
+
+#[cfg(any(feature = "std", feature = "alloc"))]
 impl<'de, E> IntoDeserializer<'de, E> for String
 where
     E: de::Error,
 {
     type Deserializer = StringDeserializer<E>;
 
     fn into_deserializer(self) -> StringDeserializer<E> {
         StringDeserializer {
@@ -478,19 +561,19 @@ where
         V: de::Visitor<'de>,
     {
         let _ = name;
         let _ = variants;
         visitor.visit_enum(self)
     }
 
     forward_to_deserialize_any! {
-        bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes
-        byte_buf option unit unit_struct newtype_struct seq tuple tuple_struct
-        map struct identifier ignored_any
+        bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string
+        bytes byte_buf option unit unit_struct newtype_struct seq tuple
+        tuple_struct map struct identifier ignored_any
     }
 }
 
 #[cfg(any(feature = "std", feature = "alloc"))]
 impl<'de, 'a, E> de::EnumAccess<'de> for StringDeserializer<E>
 where
     E: de::Error,
 {
@@ -504,23 +587,33 @@ where
         seed.deserialize(self).map(private::unit_only)
     }
 }
 
 ////////////////////////////////////////////////////////////////////////////////
 
 /// A deserializer holding a `Cow<str>`.
 #[cfg(any(feature = "std", feature = "alloc"))]
-#[derive(Clone, Debug)]
+#[derive(Debug)]
 pub struct CowStrDeserializer<'a, E> {
     value: Cow<'a, str>,
     marker: PhantomData<E>,
 }
 
 #[cfg(any(feature = "std", feature = "alloc"))]
+impl<'a, E> Clone for CowStrDeserializer<'a, E> {
+    fn clone(&self) -> Self {
+        CowStrDeserializer {
+            value: self.value.clone(),
+            marker: PhantomData,
+        }
+    }
+}
+
+#[cfg(any(feature = "std", feature = "alloc"))]
 impl<'de, 'a, E> IntoDeserializer<'de, E> for Cow<'a, str>
 where
     E: de::Error,
 {
     type Deserializer = CowStrDeserializer<'a, E>;
 
     fn into_deserializer(self) -> CowStrDeserializer<'a, E> {
         CowStrDeserializer {
@@ -557,19 +650,19 @@ where
         V: de::Visitor<'de>,
     {
         let _ = name;
         let _ = variants;
         visitor.visit_enum(self)
     }
 
     forward_to_deserialize_any! {
-        bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes
-        byte_buf option unit unit_struct newtype_struct seq tuple tuple_struct
-        map struct identifier ignored_any
+        bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string
+        bytes byte_buf option unit unit_struct newtype_struct seq tuple
+        tuple_struct map struct identifier ignored_any
     }
 }
 
 #[cfg(any(feature = "std", feature = "alloc"))]
 impl<'de, 'a, E> de::EnumAccess<'de> for CowStrDeserializer<'a, E>
 where
     E: de::Error,
 {
@@ -583,22 +676,24 @@ where
         seed.deserialize(self).map(private::unit_only)
     }
 }
 
 ////////////////////////////////////////////////////////////////////////////////
 
 /// A deserializer holding a `&[u8]` with a lifetime tied to another
 /// deserializer.
-#[derive(Clone, Debug)]
+#[derive(Debug)]
 pub struct BorrowedBytesDeserializer<'de, E> {
     value: &'de [u8],
     marker: PhantomData<E>,
 }
 
+impl_copy_clone!(BorrowedBytesDeserializer<'de>);
+
 impl<'de, E> BorrowedBytesDeserializer<'de, E> {
     /// Create a new borrowed deserializer from the given byte slice.
     pub fn new(value: &'de [u8]) -> BorrowedBytesDeserializer<'de, E> {
         BorrowedBytesDeserializer {
             value: value,
             marker: PhantomData,
         }
     }
@@ -613,19 +708,19 @@ where
     fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
     where
         V: de::Visitor<'de>,
     {
         visitor.visit_borrowed_bytes(self.value)
     }
 
     forward_to_deserialize_any! {
-        bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes
-        byte_buf option unit unit_struct newtype_struct seq tuple tuple_struct
-        map struct identifier ignored_any enum
+        bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string
+        bytes byte_buf option unit unit_struct newtype_struct seq tuple
+        tuple_struct map struct identifier ignored_any enum
     }
 }
 
 ////////////////////////////////////////////////////////////////////////////////
 
 /// A deserializer that iterates over a sequence.
 #[derive(Clone, Debug)]
 pub struct SeqDeserializer<I, E> {
@@ -683,19 +778,19 @@ where
         V: de::Visitor<'de>,
     {
         let v = try!(visitor.visit_seq(&mut self));
         try!(self.end());
         Ok(v)
     }
 
     forward_to_deserialize_any! {
-        bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes
-        byte_buf option unit unit_struct newtype_struct seq tuple tuple_struct
-        map struct enum identifier ignored_any
+        bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string
+        bytes byte_buf option unit unit_struct newtype_struct seq tuple
+        tuple_struct map struct enum identifier ignored_any
     }
 }
 
 impl<'de, I, T, E> de::SeqAccess<'de> for SeqDeserializer<I, E>
 where
     I: Iterator<Item = T>,
     T: IntoDeserializer<'de, E>,
     E: de::Error,
@@ -798,19 +893,19 @@ where
     fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
     where
         V: de::Visitor<'de>,
     {
         visitor.visit_seq(self.seq)
     }
 
     forward_to_deserialize_any! {
-        bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes
-        byte_buf option unit unit_struct newtype_struct seq tuple tuple_struct
-        map struct enum identifier ignored_any
+        bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string
+        bytes byte_buf option unit unit_struct newtype_struct seq tuple
+        tuple_struct map struct enum identifier ignored_any
     }
 }
 
 ////////////////////////////////////////////////////////////////////////////////
 
 /// A deserializer that iterates over a map.
 pub struct MapDeserializer<'de, I, E>
 where
@@ -912,19 +1007,19 @@ where
     where
         V: de::Visitor<'de>,
     {
         let _ = len;
         self.deserialize_seq(visitor)
     }
 
     forward_to_deserialize_any! {
-        bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes
-        byte_buf option unit unit_struct newtype_struct tuple_struct map struct
-        enum identifier ignored_any
+        bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string
+        bytes byte_buf option unit unit_struct newtype_struct tuple_struct map
+        struct enum identifier ignored_any
     }
 }
 
 impl<'de, I, E> de::MapAccess<'de> for MapDeserializer<'de, I, E>
 where
     I: Iterator,
     I::Item: private::Pair,
     First<I::Item>: IntoDeserializer<'de, E>,
@@ -1054,19 +1149,19 @@ impl<'de, A, B, E> de::Deserializer<'de>
 where
     A: IntoDeserializer<'de, E>,
     B: IntoDeserializer<'de, E>,
     E: de::Error,
 {
     type Error = E;
 
     forward_to_deserialize_any! {
-        bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes
-        byte_buf option unit unit_struct newtype_struct tuple_struct map struct
-        enum identifier ignored_any
+        bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string
+        bytes byte_buf option unit unit_struct newtype_struct tuple_struct map
+        struct enum identifier ignored_any
     }
 
     fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
     where
         V: de::Visitor<'de>,
     {
         self.deserialize_seq(visitor)
     }
@@ -1202,19 +1297,19 @@ where
     fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
     where
         V: de::Visitor<'de>,
     {
         visitor.visit_map(self.map)
     }
 
     forward_to_deserialize_any! {
-        bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes
-        byte_buf option unit unit_struct newtype_struct seq tuple tuple_struct
-        map struct enum identifier ignored_any
+        bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string
+        bytes byte_buf option unit unit_struct newtype_struct seq tuple
+        tuple_struct map struct enum identifier ignored_any
     }
 }
 
 ////////////////////////////////////////////////////////////////////////////////
 
 mod private {
     use lib::*;
 
new file mode 100644
--- /dev/null
+++ b/third_party/rust/serde/src/integer128.rs
@@ -0,0 +1,86 @@
+/// Conditional compilation depending on whether Serde is built with support for
+/// 128-bit integers.
+///
+/// Data formats that wish to support Rust compiler versions older than 1.26 may
+/// place the i128 / u128 methods of their Serializer and Deserializer behind
+/// this macro.
+///
+/// Data formats that require a minimum Rust compiler version of at least 1.26
+/// do not need to bother with this macro and may assume support for 128-bit
+/// integers.
+///
+/// ```rust
+/// #[macro_use]
+/// extern crate serde;
+///
+/// use serde::Serializer;
+/// # use serde::private::ser::Error;
+/// #
+/// # struct MySerializer;
+///
+/// impl Serializer for MySerializer {
+///     type Ok = ();
+///     type Error = Error;
+///
+///     fn serialize_i64(self, v: i64) -> Result<Self::Ok, Self::Error> {
+///         /* ... */
+/// #         unimplemented!()
+///     }
+///
+///     /* ... */
+///
+///     serde_if_integer128! {
+///         fn serialize_i128(self, v: i128) -> Result<Self::Ok, Self::Error> {
+///             /* ... */
+/// #             unimplemented!()
+///         }
+///
+///         fn serialize_u128(self, v: u128) -> Result<Self::Ok, Self::Error> {
+///             /* ... */
+/// #             unimplemented!()
+///         }
+///     }
+/// #
+/// #     __serialize_unimplemented! {
+/// #         bool i8 i16 i32 u8 u16 u32 u64 f32 f64 char str bytes none some
+/// #         unit unit_struct unit_variant newtype_struct newtype_variant seq
+/// #         tuple tuple_struct tuple_variant map struct struct_variant
+/// #     }
+/// }
+/// #
+/// # fn main() {}
+/// ```
+///
+/// When Serde is built with support for 128-bit integers, this macro expands
+/// transparently into just the input tokens.
+///
+/// ```rust
+/// macro_rules! serde_if_integer128 {
+///     ($($tt:tt)*) => {
+///         $($tt)*
+///     };
+/// }
+/// ```
+///
+/// When built without support for 128-bit integers, this macro expands to
+/// nothing.
+///
+/// ```rust
+/// macro_rules! serde_if_integer128 {
+///     ($($tt:tt)*) => {};
+/// }
+/// ```
+#[cfg(integer128)]
+#[macro_export]
+macro_rules! serde_if_integer128 {
+    ($($tt:tt)*) => {
+        $($tt)*
+    };
+}
+
+#[cfg(not(integer128))]
+#[macro_export]
+#[doc(hidden)]
+macro_rules! serde_if_integer128 {
+    ($($tt:tt)*) => {};
+}
--- a/third_party/rust/serde/src/lib.rs
+++ b/third_party/rust/serde/src/lib.rs
@@ -47,16 +47,18 @@
 //!   size without the need for version negotiation.
 //! - [YAML], a popular human-friendly configuration language that ain't markup
 //!   language.
 //! - [MessagePack], an efficient binary format that resembles a compact JSON.
 //! - [TOML], a minimal configuration format used by [Cargo].
 //! - [Pickle], a format common in the Python world.
 //! - [Hjson], a variant of JSON designed to be readable and writable by humans.
 //! - [BSON], the data storage and network transfer format used by MongoDB.
+//! - [Avro], a binary format used within Apache Hadoop, with support for schema
+//!   definition.
 //! - [URL], the x-www-form-urlencoded format.
 //! - [XML], the flexible machine-friendly W3C standard.
 //!   *(deserialization only)*
 //! - [Envy], a way to deserialize environment variables into Rust structs.
 //!   *(deserialization only)*
 //! - [Redis], deserialize values from Redis when using [redis-rs].
 //!   *(deserialization only)*
 //!
@@ -64,34 +66,35 @@
 //! [Bincode]: https://github.com/TyOverby/bincode
 //! [CBOR]: https://github.com/pyfisch/cbor
 //! [YAML]: https://github.com/dtolnay/serde-yaml
 //! [MessagePack]: https://github.com/3Hren/msgpack-rust
 //! [TOML]: https://github.com/alexcrichton/toml-rs
 //! [Pickle]: https://github.com/birkenfeld/serde-pickle
 //! [Hjson]: https://github.com/laktak/hjson-rust
 //! [BSON]: https://github.com/zonyitoo/bson-rs
+//! [Avro]: https://github.com/flavray/avro-rs
 //! [URL]: https://github.com/nox/serde_urlencoded
 //! [XML]: https://github.com/RReverser/serde-xml-rs
 //! [Envy]: https://github.com/softprops/envy
 //! [Redis]: https://github.com/OneSignal/serde-redis
 //! [Cargo]: http://doc.crates.io/manifest.html
 //! [redis-rs]: https://crates.io/crates/redis
 
 ////////////////////////////////////////////////////////////////////////////////
 
 // Serde types in rustdoc of other crates get linked to here.
-#![doc(html_root_url = "https://docs.rs/serde/1.0.58")]
+#![doc(html_root_url = "https://docs.rs/serde/1.0.66")]
 // Support using Serde without the standard library!
 #![cfg_attr(not(feature = "std"), no_std)]
 // Unstable functionality only if the user asks for it. For tracking and
 // discussion of these features please refer to this issue:
 //
 //    https://github.com/serde-rs/serde/issues/812
-#![cfg_attr(feature = "unstable", feature(specialization))]
+#![cfg_attr(feature = "unstable", feature(specialization, never_type))]
 #![cfg_attr(feature = "alloc", feature(alloc))]
 #![cfg_attr(feature = "cargo-clippy", deny(clippy, clippy_pedantic))]
 // Whitelisted clippy lints
 #![cfg_attr(
     feature = "cargo-clippy",
     allow(
         cast_lossless, const_static_lifetime, doc_markdown, linkedlist, needless_pass_by_value,
         redundant_field_names, type_complexity, unreadable_literal, zero_prefixed_literal
@@ -125,31 +128,28 @@
 // Blacklisted Rust lints.
 #![deny(missing_docs, unused_imports)]
 
 ////////////////////////////////////////////////////////////////////////////////
 
 #[cfg(feature = "alloc")]
 extern crate alloc;
 
-#[cfg(all(feature = "unstable", feature = "std"))]
-extern crate core;
-
 /// A facade around all the types we need from the `std`, `core`, and `alloc`
 /// crates. This avoids elaborate import wrangling having to happen in every
 /// module.
 mod lib {
     mod core {
         #[cfg(not(feature = "std"))]
         pub use core::*;
         #[cfg(feature = "std")]
         pub use std::*;
     }
 
-    pub use self::core::{cmp, iter, mem, ops, slice, str};
+    pub use self::core::{cmp, iter, mem, num, ops, slice, str};
     pub use self::core::{f32, f64};
     pub use self::core::{i16, i32, i64, i8, isize};
     pub use self::core::{u16, u32, u64, u8, usize};
 
     pub use self::core::cell::{Cell, RefCell};
     pub use self::core::clone::{self, Clone};
     pub use self::core::convert::{self, From, Into};
     pub use self::core::default::{self, Default};
@@ -206,27 +206,30 @@ mod lib {
     pub use std::io::Write;
     #[cfg(feature = "std")]
     pub use std::num::Wrapping;
     #[cfg(feature = "std")]
     pub use std::path::{Path, PathBuf};
     #[cfg(feature = "std")]
     pub use std::sync::{Mutex, RwLock};
     #[cfg(feature = "std")]
-    pub use std::time::{Duration, SystemTime, UNIX_EPOCH};
+    pub use std::time::{SystemTime, UNIX_EPOCH};
 
-    #[cfg(feature = "unstable")]
-    pub use core::num::{NonZeroU16, NonZeroU32, NonZeroU64, NonZeroU8, NonZeroUsize};
+    #[cfg(any(core_duration, feature = "std"))]
+    pub use self::core::time::Duration;
 }
 
 ////////////////////////////////////////////////////////////////////////////////
 
 #[macro_use]
 mod macros;
 
+#[macro_use]
+mod integer128;
+
 pub mod de;
 pub mod ser;
 
 #[doc(inline)]
 pub use de::{Deserialize, Deserializer};
 #[doc(inline)]
 pub use ser::{Serialize, Serializer};
 
--- a/third_party/rust/serde/src/macros.rs
+++ b/third_party/rust/serde/src/macros.rs
@@ -41,18 +41,18 @@
 /// fn deserialize_bool<V>(self, visitor: V) -> Result<V::Value, Self::Error>
 /// where
 ///     V: Visitor<'de>,
 /// {
 ///     self.deserialize_any(visitor)
 /// }
 /// #
 /// #     forward_to_deserialize_any! {
-/// #         i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes
-/// #         byte_buf option unit unit_struct newtype_struct seq tuple
+/// #         i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string
+/// #         bytes byte_buf option unit unit_struct newtype_struct seq tuple
 /// #         tuple_struct map struct enum identifier ignored_any
 /// #     }
 /// # }
 /// #
 /// # fn main() {}
 /// ```
 ///
 /// The `forward_to_deserialize_any!` macro implements these simple forwarding
@@ -75,18 +75,18 @@
 ///         V: Visitor<'de>,
 ///     {
 ///         /* ... */
 /// #       let _ = visitor;
 /// #       unimplemented!()
 ///     }
 ///
 ///     forward_to_deserialize_any! {
-///         bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes
-///         byte_buf option unit unit_struct newtype_struct seq tuple
+///         bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string
+///         bytes byte_buf option unit unit_struct newtype_struct seq tuple
 ///         tuple_struct map struct enum identifier ignored_any
 ///     }
 /// }
 /// #
 /// # fn main() {}
 /// ```
 ///
 /// The macro assumes the convention that your `Deserializer` lifetime parameter
@@ -111,19 +111,19 @@
 /// #     where
 /// #         W: Visitor<'q>,
 /// #     {
 /// #         unimplemented!()
 /// #     }
 /// #
 /// forward_to_deserialize_any! {
 ///     <W: Visitor<'q>>
-///     bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes
-///     byte_buf option unit unit_struct newtype_struct seq tuple tuple_struct
-///     map struct enum identifier ignored_any
+///     bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string
+///     bytes byte_buf option unit unit_struct newtype_struct seq tuple
+///     tuple_struct map struct enum identifier ignored_any
 /// }
 /// # }
 /// #
 /// # fn main() {}
 /// ```
 ///
 /// [`Deserializer`]: trait.Deserializer.html
 /// [`Visitor`]: de/trait.Visitor.html
@@ -169,28 +169,38 @@ macro_rules! forward_to_deserialize_any_
         forward_to_deserialize_any_method!{deserialize_i16<$l, $v>()}
     };
     (i32<$l:tt, $v:ident>) => {
         forward_to_deserialize_any_method!{deserialize_i32<$l, $v>()}
     };
     (i64<$l:tt, $v:ident>) => {
         forward_to_deserialize_any_method!{deserialize_i64<$l, $v>()}
     };
+    (i128<$l:tt, $v:ident>) => {
+        serde_if_integer128! {
+            forward_to_deserialize_any_method!{deserialize_i128<$l, $v>()}
+        }
+    };
     (u8<$l:tt, $v:ident>) => {
         forward_to_deserialize_any_method!{deserialize_u8<$l, $v>()}
     };
     (u16<$l:tt, $v:ident>) => {
         forward_to_deserialize_any_method!{deserialize_u16<$l, $v>()}
     };
     (u32<$l:tt, $v:ident>) => {
         forward_to_deserialize_any_method!{deserialize_u32<$l, $v>()}
     };
     (u64<$l:tt, $v:ident>) => {
         forward_to_deserialize_any_method!{deserialize_u64<$l, $v>()}
     };
+    (u128<$l:tt, $v:ident>) => {
+        serde_if_integer128! {
+            forward_to_deserialize_any_method!{deserialize_u128<$l, $v>()}
+        }
+    };
     (f32<$l:tt, $v:ident>) => {
         forward_to_deserialize_any_method!{deserialize_f32<$l, $v>()}
     };
     (f64<$l:tt, $v:ident>) => {
         forward_to_deserialize_any_method!{deserialize_f64<$l, $v>()}
     };
     (char<$l:tt, $v:ident>) => {
         forward_to_deserialize_any_method!{deserialize_char<$l, $v>()}
--- a/third_party/rust/serde/src/private/de.rs
+++ b/third_party/rust/serde/src/private/de.rs
@@ -45,19 +45,19 @@ where
         fn deserialize_option<V>(self, visitor: V) -> Result<V::Value, E>
         where
             V: Visitor<'de>,
         {
             visitor.visit_none()
         }
 
         forward_to_deserialize_any! {
-            bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes
-            byte_buf unit unit_struct newtype_struct seq tuple tuple_struct map
-            struct enum identifier ignored_any
+            bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string
+            bytes byte_buf unit unit_struct newtype_struct seq tuple
+            tuple_struct map struct enum identifier ignored_any
         }
     }
 
     let deserializer = MissingFieldDeserializer(field, PhantomData);
     Deserialize::deserialize(deserializer)
 }
 
 #[cfg(any(feature = "std", feature = "alloc"))]
@@ -1613,18 +1613,18 @@ mod content {
                     Ok(ret)
                 } else {
                     Err(de::Error::invalid_length(len, &"fewer elements in array"))
                 }
             }
         }
 
         forward_to_deserialize_any! {
-            bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes
-            byte_buf option unit unit_struct newtype_struct seq tuple
+            bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string
+            bytes byte_buf option unit unit_struct newtype_struct seq tuple
             tuple_struct map struct enum identifier ignored_any
         }
     }
 
     impl<'de, E> de::SeqAccess<'de> for SeqDeserializer<'de, E>
     where
         E: de::Error,
     {
@@ -1711,18 +1711,18 @@ mod content {
         fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
         where
             V: de::Visitor<'de>,
         {
             visitor.visit_map(self)
         }
 
         forward_to_deserialize_any! {
-            bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes
-            byte_buf option unit unit_struct newtype_struct seq tuple
+            bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string
+            bytes byte_buf option unit unit_struct newtype_struct seq tuple
             tuple_struct map struct enum identifier ignored_any
         }
     }
 
     /// Not public API.
     pub struct ContentRefDeserializer<'a, 'de: 'a, E> {
         content: &'a Content<'de>,
         err: PhantomData<E>,
@@ -2301,34 +2301,35 @@ mod content {
                     Ok(ret)
                 } else {
                     Err(de::Error::invalid_length(len, &"fewer elements in array"))
                 }
             }
         }
 
         forward_to_deserialize_any! {
-            bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes
-            byte_buf option unit unit_struct newtype_struct seq tuple
+            bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string
+            bytes byte_buf option unit unit_struct newtype_struct seq tuple
             tuple_struct map struct enum identifier ignored_any
         }
     }
 
     impl<'de, 'a, E> de::SeqAccess<'de> for SeqRefDeserializer<'a, 'de, E>
     where
         E: de::Error,
     {
         type Error = E;
 
         fn next_element_seed<T>(&mut self, seed: T) -> Result<Option<T::Value>, Self::Error>
         where
             T: de::DeserializeSeed<'de>,
         {
             match self.iter.next() {
-                Some(value) => seed.deserialize(ContentRefDeserializer::new(value))
+                Some(value) => seed
+                    .deserialize(ContentRefDeserializer::new(value))
                     .map(Some),
                 None => Ok(None),
             }
         }
 
         fn size_hint(&self) -> Option<usize> {
             size_hint::from_bounds(&self.iter)
         }
@@ -2400,18 +2401,18 @@ mod content {
         fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
         where
             V: de::Visitor<'de>,
         {
             visitor.visit_map(self)
         }
 
         forward_to_deserialize_any! {
-            bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes
-            byte_buf option unit unit_struct newtype_struct seq tuple
+            bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string
+            bytes byte_buf option unit unit_struct newtype_struct seq tuple
             tuple_struct map struct enum identifier ignored_any
         }
     }
 
     impl<'de, E> de::IntoDeserializer<'de, E> for ContentDeserializer<'de, E>
     where
         E: de::Error,
     {
@@ -2572,19 +2573,19 @@ where
     fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
     where
         V: Visitor<'de>,
     {
         visitor.visit_str(self.value)
     }
 
     forward_to_deserialize_any! {
-        bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes
-        byte_buf option unit unit_struct newtype_struct seq tuple tuple_struct
-        map struct enum identifier ignored_any
+        bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string
+        bytes byte_buf option unit unit_struct newtype_struct seq tuple
+        tuple_struct map struct enum identifier ignored_any
     }
 }
 
 pub struct BytesDeserializer<'a, E> {
     value: &'a [u8],
     marker: PhantomData<E>,
 }
 
@@ -2611,19 +2612,19 @@ where
     fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
     where
         V: Visitor<'de>,
     {
         visitor.visit_bytes(self.value)
     }
 
     forward_to_deserialize_any! {
-        bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str string bytes
-        byte_buf option unit unit_struct newtype_struct seq tuple tuple_struct
-        map struct enum identifier ignored_any
+        bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char str string
+        bytes byte_buf option unit unit_struct newtype_struct seq tuple
+        tuple_struct map struct enum identifier ignored_any
     }
 }
 
 /// A DeserializeSeed helper for implementing deserialize_in_place Visitors.
 ///
 /// Wraps a mutable reference and calls deserialize_in_place on it.
 pub struct InPlaceSeed<'a, T: 'a>(pub &'a mut T);
 
@@ -2717,29 +2718,29 @@ where
             name
         )))
     }
 
     fn deserialize_map<V>(self, visitor: V) -> Result<V::Value, Self::Error>
     where
         V: Visitor<'de>,
     {
-        visitor.visit_map(FlatMapAccess::new(self.0.iter_mut(), None))
+        visitor.visit_map(FlatMapAccess::new(self.0.iter()))
     }
 
     fn deserialize_struct<V>(
         self,
         _: &'static str,
         fields: &'static [&'static str],
         visitor: V,
     ) -> Result<V::Value, Self::Error>
     where
         V: Visitor<'de>,
     {
-        visitor.visit_map(FlatMapAccess::new(self.0.iter_mut(), Some(fields)))
+        visitor.visit_map(FlatStructAccess::new(self.0.iter_mut(), fields))
     }
 
     fn deserialize_newtype_struct<V>(self, _name: &str, visitor: V) -> Result<V::Value, Self::Error>
     where
         V: Visitor<'de>,
     {
         visitor.visit_newtype_struct(self)
     }
@@ -2778,32 +2779,29 @@ where
         deserialize_tuple_struct(&'static str, usize)
         deserialize_identifier()
         deserialize_ignored_any()
     }
 }
 
 #[cfg(any(feature = "std", feature = "alloc"))]
 pub struct FlatMapAccess<'a, 'de: 'a, E> {
-    iter: slice::IterMut<'a, Option<(Content<'de>, Content<'de>)>>,
-    pending_content: Option<Content<'de>>,
-    fields: Option<&'static [&'static str]>,
+    iter: slice::Iter<'a, Option<(Content<'de>, Content<'de>)>>,
+    pending_content: Option<&'a Content<'de>>,
     _marker: PhantomData<E>,
 }
 
 #[cfg(any(feature = "std", feature = "alloc"))]
 impl<'a, 'de, E> FlatMapAccess<'a, 'de, E> {
     fn new(
-        iter: slice::IterMut<'a, Option<(Content<'de>, Content<'de>)>>,
-        fields: Option<&'static [&'static str]>,
+        iter: slice::Iter<'a, Option<(Content<'de>, Content<'de>)>>,
     ) -> FlatMapAccess<'a, 'de, E> {
         FlatMapAccess {
             iter: iter,
             pending_content: None,
-            fields: fields,
             _marker: PhantomData,
         }
     }
 }
 
 #[cfg(any(feature = "std", feature = "alloc"))]
 impl<'a, 'de, E> MapAccess<'de> for FlatMapAccess<'a, 'de, E>
 where
@@ -2811,29 +2809,77 @@ where
 {
     type Error = E;
 
     fn next_key_seed<T>(&mut self, seed: T) -> Result<Option<T::Value>, Self::Error>
     where
         T: DeserializeSeed<'de>,
     {
         while let Some(item) = self.iter.next() {
+            // Items in the vector are nulled out when used by a struct.
+            if let Some((ref key, ref content)) = *item {
+                self.pending_content = Some(content);
+                return seed.deserialize(ContentRefDeserializer::new(key)).map(Some);
+            }
+        }
+        Ok(None)
+    }
+
+    fn next_value_seed<T>(&mut self, seed: T) -> Result<T::Value, Self::Error>
+    where
+        T: DeserializeSeed<'de>,
+    {
+        match self.pending_content.take() {
+            Some(value) => seed.deserialize(ContentRefDeserializer::new(value)),
+            None => Err(Error::custom("value is missing")),
+        }
+    }
+}
+
+#[cfg(any(feature = "std", feature = "alloc"))]
+pub struct FlatStructAccess<'a, 'de: 'a, E> {
+    iter: slice::IterMut<'a, Option<(Content<'de>, Content<'de>)>>,
+    pending_content: Option<Content<'de>>,
+    fields: &'static [&'static str],
+    _marker: PhantomData<E>,
+}
+
+#[cfg(any(feature = "std", feature = "alloc"))]
+impl<'a, 'de, E> FlatStructAccess<'a, 'de, E> {
+    fn new(
+        iter: slice::IterMut<'a, Option<(Content<'de>, Content<'de>)>>,
+        fields: &'static [&'static str],
+    ) -> FlatStructAccess<'a, 'de, E> {
+        FlatStructAccess {
+            iter: iter,
+            pending_content: None,
+            fields: fields,
+            _marker: PhantomData,
+        }
+    }
+}
+
+#[cfg(any(feature = "std", feature = "alloc"))]
+impl<'a, 'de, E> MapAccess<'de> for FlatStructAccess<'a, 'de, E>
+where
+    E: Error,
+{
+    type Error = E;
+
+    fn next_key_seed<T>(&mut self, seed: T) -> Result<Option<T::Value>, Self::Error>
+    where
+        T: DeserializeSeed<'de>,
+    {
+        while let Some(item) = self.iter.next() {
             // items in the vector are nulled out when used.  So we can only use
             // an item if it's still filled in and if the field is one we care
             // about.  In case we do not know which fields we want, we take them all.
             let use_item = match *item {
                 None => false,
-                Some((ref c, _)) => {
-                    c.as_str()
-                        .map_or(self.fields.is_none(), |key| match self.fields {
-                            None => true,
-                            Some(fields) if fields.contains(&key) => true,
-                            _ => false,
-                        })
-                }
+                Some((ref c, _)) => c.as_str().map_or(false, |key| self.fields.contains(&key)),
             };
 
             if use_item {
                 let (key, content) = item.take().unwrap();
                 self.pending_content = Some(content);
                 return seed.deserialize(ContentDeserializer::new(key)).map(Some);
             }
         }
--- a/third_party/rust/serde/src/private/ser.rs
+++ b/third_party/rust/serde/src/private/ser.rs
@@ -942,17 +942,18 @@ mod content {
             self.key = Some(key);
             Ok(())
         }
 
         fn serialize_value<T: ?Sized>(&mut self, value: &T) -> Result<(), E>
         where
             T: Serialize,
         {
-            let key = self.key
+            let key = self
+                .key
                 .take()
                 .expect("serialize_value called before serialize_key");
             let value = try!(value.serialize(ContentSerializer::<E>::new()));
             self.entries.push((key, value));
             Ok(())
         }
 
         fn end(self) -> Result<Content, E> {
--- a/third_party/rust/serde/src/ser/impls.rs
+++ b/third_party/rust/serde/src/ser/impls.rs
@@ -3,20 +3,17 @@
 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
 use lib::*;
 
-use ser::{Serialize, SerializeTuple, Serializer};
-
-#[cfg(feature = "std")]
-use ser::Error;
+use ser::{Error, Serialize, SerializeTuple, Serializer};
 
 ////////////////////////////////////////////////////////////////////////////////
 
 macro_rules! primitive_impl {
     ($ty:ident, $method:ident $($cast:tt)*) => {
         impl Serialize for $ty {
             #[inline]
             fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
@@ -39,16 +36,21 @@ primitive_impl!(usize, serialize_u64 as 
 primitive_impl!(u8, serialize_u8);
 primitive_impl!(u16, serialize_u16);
 primitive_impl!(u32, serialize_u32);
 primitive_impl!(u64, serialize_u64);
 primitive_impl!(f32, serialize_f32);
 primitive_impl!(f64, serialize_f64);
 primitive_impl!(char, serialize_char);
 
+serde_if_integer128! {
+    primitive_impl!(i128, serialize_i128);
+    primitive_impl!(u128, serialize_u128);
+}
+
 ////////////////////////////////////////////////////////////////////////////////
 
 impl Serialize for str {
     #[inline]
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
     where
         S: Serializer,
     {
@@ -240,16 +242,26 @@ impl Serialize for () {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
     where
         S: Serializer,
     {
         serializer.serialize_unit()
     }
 }
 
+#[cfg(feature = "unstable")]
+impl Serialize for ! {
+    fn serialize<S>(&self, _serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+    {
+        *self
+    }
+}
+
 ////////////////////////////////////////////////////////////////////////////////
 
 macro_rules! tuple_impls {
     ($($len:expr => ($($n:tt $name:ident)+))+) => {
         $(
             impl<$($name),+> Serialize for ($($name,)+)
             where
                 $($name: Serialize,)+
@@ -406,18 +418,18 @@ where
     }
 }
 
 ////////////////////////////////////////////////////////////////////////////////
 
 macro_rules! nonzero_integers {
     ( $( $T: ident, )+ ) => {
         $(
-            #[cfg(feature = "unstable")]
-            impl Serialize for $T {
+            #[cfg(num_nonzero)]
+            impl Serialize for num::$T {
                 fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
                 where
                     S: Serializer,
                 {
                     self.get().serialize(serializer)
                 }
             }
         )+
@@ -449,17 +461,20 @@ where
 impl<T> Serialize for RefCell<T>
 where
     T: Serialize,
 {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
     where
         S: Serializer,
     {
-        self.borrow().serialize(serializer)
+        match self.try_borrow() {
+            Ok(value) => value.serialize(serializer),
+            Err(_) => Err(S::Error::custom("already mutably borrowed")),
+        }
     }
 }
 
 #[cfg(feature = "std")]
 impl<T> Serialize for Mutex<T>
 where
     T: Serialize,
 {
@@ -507,17 +522,17 @@ where
                 serializer.serialize_newtype_variant("Result", 1, "Err", value)
             }
         }
     }
 }
 
 ////////////////////////////////////////////////////////////////////////////////
 
-#[cfg(feature = "std")]
+#[cfg(any(core_duration, feature = "std"))]
 impl Serialize for Duration {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
     where
         S: Serializer,
     {
         use super::SerializeStruct;
         let mut state = try!(serializer.serialize_struct("Duration", 2));
         try!(state.serialize_field("secs", &self.as_secs()));
@@ -530,17 +545,18 @@ impl Serialize for Duration {
 
 #[cfg(feature = "std")]
 impl Serialize for SystemTime {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
     where
         S: Serializer,
     {
         use super::SerializeStruct;
-        let duration_since_epoch = self.duration_since(UNIX_EPOCH)
+        let duration_since_epoch = self
+            .duration_since(UNIX_EPOCH)
             .expect("SystemTime must be later than UNIX_EPOCH");
         let mut state = try!(serializer.serialize_struct("SystemTime", 2));
         try!(state.serialize_field("secs_since_epoch", &duration_since_epoch.as_secs()));
         try!(state.serialize_field("nanos_since_epoch", &duration_since_epoch.subsec_nanos()));
         state.end()
     }
 }
 
--- a/third_party/rust/serde/src/ser/mod.rs
+++ b/third_party/rust/serde/src/ser/mod.rs
@@ -43,25 +43,25 @@
 //!
 //! A partial list of well-maintained formats is given on the [Serde
 //! website][data formats].
 //!
 //! # Implementations of Serialize provided by Serde
 //!
 //!  - **Primitive types**:
 //!    - bool
-//!    - i8, i16, i32, i64, isize
-//!    - u8, u16, u32, u64, usize
+//!    - i8, i16, i32, i64, i128, isize
+//!    - u8, u16, u32, u64, u128, usize
 //!    - f32, f64
 //!    - char
 //!    - str
 //!    - &T and &mut T
 //!  - **Compound types**:
-//!    - [T]
-//!    - [T; 0] through [T; 32]
+//!    - \[T\]
+//!    - \[T; 0\] through \[T; 32\]
 //!    - tuples up to size 16
 //!  - **Common standard library types**:
 //!    - String
 //!    - Option\<T\>
 //!    - Result\<T, E\>
 //!    - PhantomData\<T\>
 //!  - **Wrapper types**:
 //!    - Box\<T\>
@@ -87,17 +87,18 @@
 //!    - OsStr
 //!    - OsString
 //!  - **Miscellaneous standard library types**:
 //!    - Duration
 //!    - SystemTime
 //!    - Path
 //!    - PathBuf
 //!    - Range\<T\>
-//!    - num::NonZero* (unstable)
+//!    - num::NonZero*
+//!    - `!` *(unstable)*
 //!  - **Net types**:
 //!    - IpAddr
 //!    - Ipv4Addr
 //!    - Ipv6Addr
 //!    - SocketAddr
 //!    - SocketAddrV4
 //!    - SocketAddrV6
 //!
@@ -122,16 +123,23 @@ pub use self::impossible::Impossible;
 
 ////////////////////////////////////////////////////////////////////////////////
 
 macro_rules! declare_error_trait {
     (Error: Sized $(+ $($supertrait:ident)::+)*) => {
         /// Trait used by `Serialize` implementations to generically construct
         /// errors belonging to the `Serializer` against which they are
         /// currently running.
+        ///
+        /// # Example implementation
+        ///
+        /// The [example data format] presented on the website shows an error
+        /// type appropriate for a basic JSON data format.
+        ///
+        /// [example data format]: https://serde.rs/data-format.html
         pub trait Error: Sized $(+ $($supertrait)::+)* {
             /// Used when a [`Serialize`] implementation encounters any error
             /// while serializing a type.
             ///
             /// The message should not be capitalized and should not end with a
             /// period.
             ///
             /// For example, a filesystem [`Path`] may refuse to serialize
@@ -239,37 +247,37 @@ pub trait Serialize {
     where
         S: Serializer;
 }
 
 ////////////////////////////////////////////////////////////////////////////////
 
 /// A **data format** that can serialize any data structure supported by Serde.
 ///
-/// The role of this trait is to define the serialization half of the Serde data
-/// model, which is a way to categorize every Rust data structure into one of 27
-/// possible types. Each method of the `Serializer` trait corresponds to one of
-/// the types of the data model.
+/// The role of this trait is to define the serialization half of the [Serde
+/// data model], which is a way to categorize every Rust data structure into one
+/// of 29 possible types. Each method of the `Serializer` trait corresponds to
+/// one of the types of the data model.
 ///
 /// Implementations of `Serialize` map themselves into this data model by
 /// invoking exactly one of the `Serializer` methods.
 ///
 /// The types that make up the Serde data model are:
 ///
-///  - **12 primitive types**
+///  - **14 primitive types**
 ///    - bool
-///    - i8, i16, i32, i64
-///    - u8, u16, u32, u64
+///    - i8, i16, i32, i64, i128
+///    - u8, u16, u32, u64, u128
 ///    - f32, f64
 ///    - char
 ///  - **string**
 ///    - UTF-8 bytes with a length and no null terminator.
 ///    - When serializing, all strings are handled equally. When deserializing,
 ///      there are three flavors of strings: transient, owned, and borrowed.
-///  - **byte array** - [u8]
+///  - **byte array** - \[u8\]
 ///    - Similar to strings, during deserialization byte arrays can be transient,
 ///      owned, or borrowed.
 ///  - **option**
 ///    - Either none or some value.
 ///  - **unit**
 ///    - The type of `()` in Rust. It represents an anonymous value containing no
 ///      data.
 ///  - **unit_struct**
@@ -304,16 +312,25 @@ pub trait Serialize {
 ///    - For example the `E::S` in `enum E { S { r: u8, g: u8, b: u8 } }`.
 ///
 /// Many Serde serializers produce text or binary data as output, for example
 /// JSON or Bincode. This is not a requirement of the `Serializer` trait, and
 /// there are serializers that do not produce text or binary output. One example
 /// is the `serde_json::value::Serializer` (distinct from the main `serde_json`
 /// serializer) that produces a `serde_json::Value` data structure in memory as
 /// output.
+///
+/// [Serde data model]: https://serde.rs/data-model.html
+///
+/// # Example implementation
+///
+/// The [example data format] presented on the website contains example code for
+/// a basic JSON `Serializer`.
+///
+/// [example data format]: https://serde.rs/data-format.html
 pub trait Serializer: Sized {
     /// The output type produced by this `Serializer` during successful
     /// serialization. Most serializers that produce text or binary output
     /// should set `Ok = ()` and serialize into an [`io::Write`] or buffer
     /// contained within the `Serializer` instance. Serializers that build
     /// in-memory data structures may be simplified by using `Ok` to propagate
     /// the data structure around.
     ///
@@ -487,16 +504,47 @@ pub trait Serializer: Sized {
     ///         serializer.serialize_i64(*self)
     ///     }
     /// }
     /// #
     /// # fn main() {}
     /// ```
     fn serialize_i64(self, v: i64) -> Result<Self::Ok, Self::Error>;
 
+    serde_if_integer128! {
+        /// Serialize an `i128` value.
+        ///
+        /// ```rust
+        /// # #[macro_use]
+        /// # extern crate serde;
+        /// #
+        /// # use serde::Serializer;
+        /// #
+        /// # __private_serialize!();
+        /// #
+        /// impl Serialize for i128 {
+        ///     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+        ///     where
+        ///         S: Serializer,
+        ///     {
+        ///         serializer.serialize_i128(*self)
+        ///     }
+        /// }
+        /// #
+        /// # fn main() {}
+        /// ```
+        ///
+        /// This method is available only on Rust compiler versions >=1.26. The
+        /// default behavior unconditionally returns an error.
+        fn serialize_i128(self, v: i128) -> Result<Self::Ok, Self::Error> {
+            let _ = v;
+            Err(Error::custom("i128 is not supported"))
+        }
+    }
+
     /// Serialize a `u8` value.
     ///
     /// If the format does not differentiate between `u8` and `u64`, a
     /// reasonable implementation would be to cast the value to `u64` and
     /// forward to `serialize_u64`.
     ///
     /// ```rust
     /// # #[macro_use]
@@ -591,16 +639,47 @@ pub trait Serializer: Sized {
     ///         serializer.serialize_u64(*self)
     ///     }
     /// }
     /// #
     /// # fn main() {}
     /// ```
     fn serialize_u64(self, v: u64) -> Result<Self::Ok, Self::Error>;
 
+    serde_if_integer128! {
+        /// Serialize a `u128` value.
+        ///
+        /// ```rust
+        /// # #[macro_use]
+        /// # extern crate serde;
+        /// #
+        /// # use serde::Serializer;
+        /// #
+        /// # __private_serialize!();
+        /// #
+        /// impl Serialize for u128 {
+        ///     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+        ///     where
+        ///         S: Serializer,
+        ///     {
+        ///         serializer.serialize_u128(*self)
+        ///     }
+        /// }
+        /// #
+        /// # fn main() {}
+        /// ```
+        ///
+        /// This method is available only on Rust compiler versions >=1.26. The
+        /// default behavior unconditionally returns an error.
+        fn serialize_u128(self, v: u128) -> Result<Self::Ok, Self::Error> {
+            let _ = v;
+            Err(Error::custom("u128 is not supported"))
+        }
+    }
+
     /// Serialize an `f32` value.
     ///
     /// If the format does not differentiate between `f32` and `f64`, a
     /// reasonable implementation would be to cast the value to `f64` and
     /// forward to `serialize_f64`.
     ///
     /// ```rust
     /// # #[macro_use]
@@ -1453,16 +1532,18 @@ pub trait Serializer: Sized {
     #[inline]
     fn is_human_readable(&self) -> bool {
         true
     }
 }
 
 /// Returned from `Serializer::serialize_seq`.
 ///
+/// # Example use
+///
 /// ```rust
 /// # use std::marker::PhantomData;
 /// #
 /// # struct Vec<T>(PhantomData<T>);
 /// #
 /// # impl<T> Vec<T> {
 /// #     fn len(&self) -> usize {
 /// #         unimplemented!()
@@ -1490,16 +1571,23 @@ pub trait Serializer: Sized {
 ///         let mut seq = serializer.serialize_seq(Some(self.len()))?;
 ///         for element in self {
 ///             seq.serialize_element(element)?;
 ///         }
 ///         seq.end()
 ///     }
 /// }
 /// ```
+///
+/// # Example implementation
+///
+/// The [example data format] presented on the website demonstrates an
+/// implementation of `SerializeSeq` for a basic JSON data format.
+///
+/// [example data format]: https://serde.rs/data-format.html
 pub trait SerializeSeq {
     /// Must match the `Ok` type of our `Serializer`.
     type Ok;
 
     /// Must match the `Error` type of our `Serializer`.
     type Error: Error;
 
     /// Serialize a sequence element.
@@ -1508,16 +1596,18 @@ pub trait SerializeSeq {
         T: Serialize;
 
     /// Finish serializing a sequence.
     fn end(self) -> Result<Self::Ok, Self::Error>;
 }
 
 /// Returned from `Serializer::serialize_tuple`.
 ///
+/// # Example use
+///
 /// ```rust
 /// use serde::ser::{Serialize, Serializer, SerializeTuple};
 ///
 /// # mod fool {
 /// #     trait Serialize {}
 /// impl<A, B, C> Serialize for (A, B, C)
 /// #     {}
 /// # }
@@ -1581,16 +1671,23 @@ pub trait SerializeSeq {
 ///         let mut seq = serializer.serialize_tuple(16)?;
 ///         for element in self {
 ///             seq.serialize_element(element)?;
 ///         }
 ///         seq.end()
 ///     }
 /// }
 /// ```
+///
+/// # Example implementation
+///
+/// The [example data format] presented on the website demonstrates an
+/// implementation of `SerializeTuple` for a basic JSON data format.
+///
+/// [example data format]: https://serde.rs/data-format.html
 pub trait SerializeTuple {
     /// Must match the `Ok` type of our `Serializer`.
     type Ok;
 
     /// Must match the `Error` type of our `Serializer`.
     type Error: Error;
 
     /// Serialize a tuple element.
@@ -1599,16 +1696,18 @@ pub trait SerializeTuple {
         T: Serialize;
 
     /// Finish serializing a tuple.
     fn end(self) -> Result<Self::Ok, Self::Error>;
 }
 
 /// Returned from `Serializer::serialize_tuple_struct`.
 ///
+/// # Example use
+///
 /// ```rust
 /// use serde::ser::{Serialize, Serializer, SerializeTupleStruct};
 ///
 /// struct Rgb(u8, u8, u8);
 ///
 /// impl Serialize for Rgb {
 ///     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
 ///     where
@@ -1617,16 +1716,23 @@ pub trait SerializeTuple {
 ///         let mut ts = serializer.serialize_tuple_struct("Rgb", 3)?;
 ///         ts.serialize_field(&self.0)?;
 ///         ts.serialize_field(&self.1)?;
 ///         ts.serialize_field(&self.2)?;
 ///         ts.end()
 ///     }
 /// }
 /// ```
+///
+/// # Example implementation
+///
+/// The [example data format] presented on the website demonstrates an
+/// implementation of `SerializeTupleStruct` for a basic JSON data format.
+///
+/// [example data format]: https://serde.rs/data-format.html
 pub trait SerializeTupleStruct {
     /// Must match the `Ok` type of our `Serializer`.
     type Ok;
 
     /// Must match the `Error` type of our `Serializer`.
     type Error: Error;
 
     /// Serialize a tuple struct field.
@@ -1635,16 +1741,18 @@ pub trait SerializeTupleStruct {
         T: Serialize;
 
     /// Finish serializing a tuple struct.
     fn end(self) -> Result<Self::Ok, Self::Error>;
 }
 
 /// Returned from `Serializer::serialize_tuple_variant`.
 ///
+/// # Example use
+///
 /// ```rust
 /// use serde::ser::{Serialize, Serializer, SerializeTupleVariant};
 ///
 /// enum E {
 ///     T(u8, u8),
 ///     U(String, u32, u32),
 /// }
 ///
@@ -1666,16 +1774,23 @@ pub trait SerializeTupleStruct {
 ///                 tv.serialize_field(b)?;
 ///                 tv.serialize_field(c)?;
 ///                 tv.end()
 ///             }
 ///         }
 ///     }
 /// }
 /// ```
+///
+/// # Example implementation
+///
+/// The [example data format] presented on the website demonstrates an
+/// implementation of `SerializeTupleVariant` for a basic JSON data format.
+///
+/// [example data format]: https://serde.rs/data-format.html
 pub trait SerializeTupleVariant {
     /// Must match the `Ok` type of our `Serializer`.
     type Ok;
 
     /// Must match the `Error` type of our `Serializer`.
     type Error: Error;
 
     /// Serialize a tuple variant field.
@@ -1684,16 +1799,18 @@ pub trait SerializeTupleVariant {
         T: Serialize;
 
     /// Finish serializing a tuple variant.
     fn end(self) -> Result<Self::Ok, Self::Error>;
 }
 
 /// Returned from `Serializer::serialize_map`.
 ///
+/// # Example use
+///
 /// ```rust
 /// # use std::marker::PhantomData;
 /// #
 /// # struct HashMap<K, V>(PhantomData<K>, PhantomData<V>);
 /// #
 /// # impl<K, V> HashMap<K, V> {
 /// #     fn len(&self) -> usize {
 /// #         unimplemented!()
@@ -1723,16 +1840,23 @@ pub trait SerializeTupleVariant {
 ///         let mut map = serializer.serialize_map(Some(self.len()))?;
 ///         for (k, v) in self {
 ///             map.serialize_entry(k, v)?;
 ///         }
 ///         map.end()
 ///     }
 /// }
 /// ```
+///
+/// # Example implementation
+///
+/// The [example data format] presented on the website demonstrates an
+/// implementation of `SerializeMap` for a basic JSON data format.
+///
+/// [example data format]: https://serde.rs/data-format.html
 pub trait SerializeMap {
     /// Must match the `Ok` type of our `Serializer`.
     type Ok;
 
     /// Must match the `Error` type of our `Serializer`.
     type Error: Error;
 
     /// Serialize a map key.
@@ -1786,16 +1910,18 @@ pub trait SerializeMap {
     }
 
     /// Finish serializing a map.
     fn end(self) -> Result<Self::Ok, Self::Error>;
 }
 
 /// Returned from `Serializer::serialize_struct`.
 ///
+/// # Example use
+///
 /// ```rust
 /// use serde::ser::{Serialize, Serializer, SerializeStruct};
 ///
 /// struct Rgb {
 ///     r: u8,
 ///     g: u8,
 ///     b: u8,
 /// }
@@ -1808,16 +1934,23 @@ pub trait SerializeMap {
 ///         let mut rgb = serializer.serialize_struct("Rgb", 3)?;
 ///         rgb.serialize_field("r", &self.r)?;
 ///         rgb.serialize_field("g", &self.g)?;
 ///         rgb.serialize_field("b", &self.b)?;
 ///         rgb.end()
 ///     }
 /// }
 /// ```
+///
+/// # Example implementation
+///
+/// The [example data format] presented on the website demonstrates an
+/// implementation of `SerializeStruct` for a basic JSON data format.
+///
+/// [example data format]: https://serde.rs/data-format.html
 pub trait SerializeStruct {
     /// Must match the `Ok` type of our `Serializer`.
     type Ok;
 
     /// Must match the `Error` type of our `Serializer`.
     type Error: Error;
 
     /// Serialize a struct field.
@@ -1837,16 +1970,18 @@ pub trait SerializeStruct {
     }
 
     /// Finish serializing a struct.
     fn end(self) -> Result<Self::Ok, Self::Error>;
 }
 
 /// Returned from `Serializer::serialize_struct_variant`.
 ///
+/// # Example use
+///
 /// ```rust
 /// use serde::ser::{Serialize, Serializer, SerializeStructVariant};
 ///
 /// enum E {
 ///     S { r: u8, g: u8, b: u8 }
 /// }
 ///
 /// impl Serialize for E {
@@ -1861,16 +1996,23 @@ pub trait SerializeStruct {
 ///                 sv.serialize_field("g", g)?;
 ///                 sv.serialize_field("b", b)?;
 ///                 sv.end()
 ///             }
 ///         }
 ///     }
 /// }
 /// ```
+///
+/// # Example implementation
+///
+/// The [example data format] presented on the website demonstrates an
+/// implementation of `SerializeStructVariant` for a basic JSON data format.
+///
+/// [example data format]: https://serde.rs/data-format.html
 pub trait SerializeStructVariant {
     /// Must match the `Ok` type of our `Serializer`.
     type Ok;
 
     /// Must match the `Error` type of our `Serializer`.
     type Error: Error;
 
     /// Serialize a struct variant field.
--- a/third_party/rust/serde_derive/.cargo-checksum.json
+++ b/third_party/rust/serde_derive/.cargo-checksum.json
@@ -1,1 +1,1 @@
-{"files":{"Cargo.toml":"c89d9663763b180430561da049809839332300df1fb69d544fded497f06eaab3","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb","README.md":"16de77b2d10fac8c70219ba183ad083ae12d4553c6f3213dec39d9936622b371","src/bound.rs":"4f8453ecfa0b6231abd35985bc0bfe64d42df613102ff6c8357425666392572b","src/de.rs":"2651f6cb8851b5fc7c1cd8d5d1ab9806064775bd33d31d4d2f056000ef561b93","src/fragment.rs":"4e34eca12e6ac4bd89f55e85786cfb677e98182f21ce1f51c0573bca5a8bcd27","src/internals/ast.rs":"2aea7bead7503c799320ad03ab03f1460e08445bc8234fbb9def99fdbebadfa5","src/internals/attr.rs":"ab3b80b8374b98d44a2fe363cbd967d94064d38ac4802d13a70ba4ab61aac4a2","src/internals/case.rs":"a07abcf7e2b7e4d72eaa1b3ce97d82b17c94533ed0ea88ea5d13d3cbf4394520","src/internals/check.rs":"72c3e01ba8adabcc9530235d0d317c54392a7fae156763022c8fd812aea86791","src/internals/ctxt.rs":"f541083477396a13cf7abd3dab448907112f900a6395ddd42ed0a6717ada1016","src/internals/mod.rs":"853f91744e77cb54222092225bc61a02b42b83ee25cb99079f7c55fbc868d481","src/lib.rs":"5ec1c796b697c55f285080db930ddaad852097763dddf11ddb05ad12eaabd32a","src/pretend.rs":"18cbafe2c936e831fe41b03562c0e8f49abfd27d29d6d31cb54e5adff39ec606","src/ser.rs":"b2c58b05fcb5aba689af722121a5db40b1d247b0d875f390769a6d98413af22f","src/try.rs":"93d3d832168f1df37852bca5e8ebfd8b77982379ecd68bb361669cae413491f8"},"package":null}
\ No newline at end of file
+{"files":{"Cargo.toml":"13802d367c4f23465fb93d2579c1bea247cd7f8310fdd90b2d0f817ad93f17a0","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"6485b8ed310d3f0340bf1ad1f47645069ce4069dcc6bb46c7d5c6faf41de1fdb","README.md":"fe0d3aa328d0e26c76f646c45197bc8c518257d31f3640baed096915b9bdf642","crates-io.md":"4de7b932987feadedd392855d166d0346e8f405190e8085a2e2f84c936a68959","src/bound.rs":"c85f8a617dcc90e5a9269305665d1610364fbb414c22cb5cadcedbe45f0c86e7","src/de.rs":"3e05e5b898f31c3ef96ed51f4e0a9b529ef6ff3240a980c450f6e4ed637c1ee3","src/fragment.rs":"9a8539d2bda80c62e4fdaf42816f371a2add62f9cf27b4646d90b636de40f56f","src/internals/ast.rs":"47b94e6b3ec03725c2c25e5764dca10286be02e975b11e5fe8e0879a1cbde424","src/internals/attr.rs":"5fc779f293fb33d2977dc0f7e86020acd9ce36fcbc90b199ba579d7a5542281b","src/internals/case.rs":"a07abcf7e2b7e4d72eaa1b3ce97d82b17c94533ed0ea88ea5d13d3cbf4394520","src/internals/check.rs":"ce47c103ec0683bdba07288834008977645b5f2e23382cabc27f6de927b724dc","src/internals/ctxt.rs":"f541083477396a13cf7abd3dab448907112f900a6395ddd42ed0a6717ada1016","src/internals/mod.rs":"414477f6fb187268ec87f917c0db9d328f4e6d188dfde93ab7479763e93e2aca","src/lib.rs":"b35a93042a3c4dec7d7871eb901e514ae28bc88183c49157a3ca4f7732aa7785","src/pretend.rs":"ea5aa1b338038ce1791cef34fd20091abb062cd61c0384ac931d0069413a5302","src/ser.rs":"6ac0b5ab9cd5a93486d7c1ca974afc95abadb11a4b923b33d04f44d1b42ffa5d","src/try.rs":"b9a10c8690d442a57fc7097d42c9a4f13034c7b4a30b7eb02d538fdbf8ae0a8d"},"package":null}
\ No newline at end of file
--- a/third_party/rust/serde_derive/Cargo.toml
+++ b/third_party/rust/serde_derive/Cargo.toml
@@ -1,31 +1,31 @@
 [package]
 name = "serde_derive"
-version = "1.0.58" # remember to update html_root_url
+version = "1.0.66" # remember to update html_root_url
 authors = ["Erick Tryzelaar <erick.tryzelaar@gmail.com>", "David Tolnay <dtolnay@gmail.com>"]
 license = "MIT/Apache-2.0"
 description = "Macros 1.1 implementation of #[derive(Serialize, Deserialize)]"
 homepage = "https://serde.rs"
 repository = "https://github.com/serde-rs/serde"
 documentation = "https://serde.rs/codegen.html"
 keywords = ["serde", "serialization", "no_std"]
-readme = "README.md"
-include = ["Cargo.toml", "src/**/*.rs", "README.md", "LICENSE-APACHE", "LICENSE-MIT"]
+readme = "crates-io.md"
+include = ["Cargo.toml", "src/**/*.rs", "crates-io.md", "README.md", "LICENSE-APACHE", "LICENSE-MIT"]
 
 [badges]
 travis-ci = { repository = "serde-rs/serde" }
 
 [features]
 default = []
 deserialize_in_place = []
 
 [lib]
 name = "serde_derive"
 proc-macro = true
 
 [dependencies]
-proc-macro2 = "0.3"
-quote = "0.5.2"
-syn = { version = "0.13", features = ["visit", "extra-traits"] }
+proc-macro2 = "0.4"
+quote = "0.6"
+syn = { version = "0.14", features = ["visit", "extra-traits"] }
 
 [dev-dependencies]
 serde = { version = "1.0", path = "../serde" }
--- a/third_party/rust/serde_derive/README.md
+++ b/third_party/rust/serde_derive/README.md
@@ -20,17 +20,17 @@ You may be looking for:
 - [API documentation](https://docs.serde.rs/serde/)
 - [Release notes](https://github.com/serde-rs/serde/releases)
 
 ## Serde in action
 
 <details>
 <summary>
 Click to show Cargo.toml.
-<a href="http://play.integer32.com/?gist=9003c5b88c1f4989941925d7190c6eec" target="_blank">Run this code in the playground.</a>
+<a href="https://play.rust-lang.org/?gist=9003c5b88c1f4989941925d7190c6eec" target="_blank">Run this code in the playground.</a>
 </summary>
 
 ```toml
 [dependencies]
 
 # The core APIs, including the Serialize and Deserialize traits. Always
 # required when using Serde.
 serde = "1.0"
new file mode 100644
--- /dev/null
+++ b/third_party/rust/serde_derive/crates-io.md
@@ -0,0 +1,55 @@
+<!-- Serde readme rendered on crates.io -->
+
+**Serde is a framework for *ser*ializing and *de*serializing Rust data structures efficiently and generically.**
+
+---
+
+You may be looking for:
+
+- [An overview of Serde](https://serde.rs/)
+- [Data formats supported by Serde](https://serde.rs/#data-formats)
+- [Setting up `#[derive(Serialize, Deserialize)]`](https://serde.rs/codegen.html)
+- [Examples](https://serde.rs/examples.html)
+- [API documentation](https://docs.serde.rs/serde/)
+- [Release notes](https://github.com/serde-rs/serde/releases)
+
+## Serde in action
+
+```rust
+#[macro_use]
+extern crate serde_derive;
+
+extern crate serde;
+extern crate serde_json;
+
+#[derive(Serialize, Deserialize, Debug)]
+struct Point {
+    x: i32,
+    y: i32,
+}
+
+fn main() {
+    let point = Point { x: 1, y: 2 };
+
+    // Convert the Point to a JSON string.
+    let serialized = serde_json::to_string(&point).unwrap();
+
+    // Prints serialized = {"x":1,"y":2}
+    println!("serialized = {}", serialized);
+
+    // Convert the JSON string back to a Point.
+    let deserialized: Point = serde_json::from_str(&serialized).unwrap();
+
+    // Prints deserialized = Point { x: 1, y: 2 }
+    println!("deserialized = {:?}", deserialized);
+}
+```
+
+## Getting help
+
+Serde developers live in the #serde channel on
+[`irc.mozilla.org`](https://wiki.mozilla.org/IRC). The #rust channel is also a
+good resource with generally faster response time but less specific knowledge
+about Serde. If IRC is not your thing or you don't get a good response, we are
+happy to respond to [GitHub issues](https://github.com/serde-rs/serde/issues/new)
+as well.
--- a/third_party/rust/serde_derive/src/bound.rs
+++ b/third_party/rust/serde_derive/src/bound.rs
@@ -50,17 +50,18 @@ pub fn with_where_predicates(
     generics
 }
 
 pub fn with_where_predicates_from_fields(
     cont: &Container,
     generics: &syn::Generics,
     from_field: fn(&attr::Field) -> Option<&[syn::WherePredicate]>,
 ) -> syn::Generics {
-    let predicates = cont.data
+    let predicates = cont
+        .data
         .all_fields()
         .flat_map(|field| from_field(&field.attrs))
         .flat_map(|predicates| predicates.to_vec());
 
     let mut generics = generics.clone();
     generics.make_where_clause().predicates.extend(predicates);
     generics
 }
@@ -134,34 +135,37 @@ pub fn with_bound(
             if let Some(seg) = path.segments.last() {
                 if seg.into_value().ident == "PhantomData" {
                     // Hardcoded exception, because PhantomData<T> implements
                     // Serialize and Deserialize whether or not T implements it.
                     return;
                 }
             }
             if path.leading_colon.is_none() && path.segments.len() == 1 {
-                let id = path.segments[0].ident;
-                if self.all_type_params.contains(&id) {
-                    self.relevant_type_params.insert(id);
+                let id = &path.segments[0].ident;
+                if self.all_type_params.contains(id) {
+                    self.relevant_type_params.insert(id.clone());
                 }
             }
             visit::visit_path(self, path);
         }
 
         // Type parameter should not be considered used by a macro path.
         //
         //     struct TypeMacro<T> {
         //         mac: T!(),
         //         marker: PhantomData<T>,
         //     }
         fn visit_macro(&mut self, _mac: &'ast syn::Macro) {}
     }
 
-    let all_type_params = generics.type_params().map(|param| param.ident).collect();
+    let all_type_params = generics
+        .type_params()
+        .map(|param| param.ident.clone())
+        .collect();
 
     let mut visitor = FindTyParams {
         all_type_params: all_type_params,
         relevant_type_params: HashSet::new(),
         associated_type_usage: Vec::new(),
     };
     match cont.data {
         Data::Enum(_, ref variants) => for variant in variants.iter() {
@@ -179,17 +183,17 @@ pub fn with_bound(
             }
         }
     }
 
     let relevant_type_params = visitor.relevant_type_params;
     let associated_type_usage = visitor.associated_type_usage;
     let new_predicates = generics
         .type_params()
-        .map(|param| param.ident)
+        .map(|param| param.ident.clone())
         .filter(|id| relevant_type_params.contains(id))
         .map(|id| syn::TypePath {
             qself: None,
             path: id.into(),
         })
         .chain(associated_type_usage.into_iter().cloned())
         .map(|bounded_ty| {
             syn::WherePredicate::Type(syn::PredicateType {
@@ -241,30 +245,32 @@ pub fn with_self_bound(
         }));
     generics
 }
 
 pub fn with_lifetime_bound(generics: &syn::Generics, lifetime: &str) -> syn::Generics {
     let bound = syn::Lifetime::new(lifetime, Span::call_site());
     let def = syn::LifetimeDef {
         attrs: Vec::new(),
-        lifetime: bound,
+        lifetime: bound.clone(),
         colon_token: None,
         bounds: Punctuated::new(),
     };
 
     let params = Some(syn::GenericParam::Lifetime(def))
         .into_iter()
         .chain(generics.params.iter().cloned().map(|mut param| {
             match param {
                 syn::GenericParam::Lifetime(ref mut param) => {
-                    param.bounds.push(bound);
+                    param.bounds.push(bound.clone());
                 }
                 syn::GenericParam::Type(ref mut param) => {
-                    param.bounds.push(syn::TypeParamBound::Lifetime(bound));
+                    param
+                        .bounds
+                        .push(syn::TypeParamBound::Lifetime(bound.clone()));
                 }
                 syn::GenericParam::Const(_) => {}
             }
             param
         }))
         .collect();
 
     syn::Generics {
@@ -274,33 +280,34 @@ pub fn with_lifetime_bound(generics: &sy
 }
 
 fn type_of_item(cont: &Container) -> syn::Type {
     syn::Type::Path(syn::TypePath {
         qself: None,
         path: syn::Path {
             leading_colon: None,
             segments: vec![syn::PathSegment {
-                ident: cont.ident,
+                ident: cont.ident.clone(),
                 arguments: syn::PathArguments::AngleBracketed(
                     syn::AngleBracketedGenericArguments {
                         colon2_token: None,
                         lt_token: Default::default(),
-                        args: cont.generics
+                        args: cont
+                            .generics
                             .params
                             .iter()
                             .map(|param| match *param {
                                 syn::GenericParam::Type(ref param) => {
                                     syn::GenericArgument::Type(syn::Type::Path(syn::TypePath {
                                         qself: None,
-                                        path: param.ident.into(),
+                                        path: param.ident.clone().into(),
                                     }))
                                 }
                                 syn::GenericParam::Lifetime(ref param) => {
-                                    syn::GenericArgument::Lifetime(param.lifetime)
+                                    syn::GenericArgument::Lifetime(param.lifetime.clone())
                                 }
                                 syn::GenericParam::Const(_) => {
                                     panic!("Serde does not support const generics yet");
                                 }
                             })
                             .collect(),
                         gt_token: Default::default(),
                     },
--- a/third_party/rust/serde_derive/src/de.rs
+++ b/third_party/rust/serde_derive/src/de.rs
@@ -1,42 +1,42 @@
 // Copyright 2017 Serde Developers
 //
 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use proc_macro2::{Literal, Span};
-use quote::{ToTokens, Tokens};
+use proc_macro2::{Literal, Span, TokenStream};
+use quote::ToTokens;
 use syn::punctuated::Punctuated;
 use syn::spanned::Spanned;
 use syn::{self, Ident, Index, Member};
 
 use bound;
 use fragment::{Expr, Fragment, Match, Stmts};
 use internals::ast::{Container, Data, Field, Style, Variant};
-use internals::{attr, Ctxt};
+use internals::{attr, Ctxt, Derive};
 use pretend;
 use try;
 
 #[cfg(feature = "deserialize_in_place")]
 use internals::ast::Repr;
 
 
 use std::collections::BTreeSet;
 
-pub fn expand_derive_deserialize(input: &syn::DeriveInput) -> Result<Tokens, String> {
+pub fn expand_derive_deserialize(input: &syn::DeriveInput) -> Result<TokenStream, String> {
     let ctxt = Ctxt::new();
-    let cont = Container::from_ast(&ctxt, input);
+    let cont = Container::from_ast(&ctxt, input, Derive::Deserialize);
     precondition(&ctxt, &cont);
     try!(ctxt.check());
 
-    let ident = cont.ident;
+    let ident = &cont.ident;
     let params = Parameters::new(&cont);
     let (de_impl_generics, _, ty_generics, where_clause) = split_with_de_lifetime(&params);
     let dummy_const = Ident::new(
         &format!("_IMPL_DESERIALIZE_FOR_{}", ident),
         Span::call_site(),
     );
     let body = Stmts(deserialize_body(&cont, &params));
     let delife = params.borrowed.de_lifetime();
@@ -130,38 +130,38 @@ struct Parameters {
 
     /// At least one field has a serde(getter) attribute, implying that the
     /// remote type has a private field.
     has_getter: bool,
 }
 
 impl Parameters {
     fn new(cont: &Container) -> Self {
-        let local = cont.ident;
+        let local = cont.ident.clone();
         let this = match cont.attrs.remote() {
             Some(remote) => remote.clone(),
-            None => cont.ident.into(),
+            None => cont.ident.clone().into(),
         };
         let borrowed = borrowed_lifetimes(cont);
         let generics = build_generics(cont, &borrowed);
         let has_getter = cont.data.has_getter();
 
         Parameters {
             local: local,
             this: this,
             generics: generics,
             borrowed: borrowed,
             has_getter: has_getter,
         }
     }
 
     /// Type name to use in error messages and `&'static str` arguments to
     /// various Deserializer methods.
-    fn type_name(&self) -> &str {
-        self.this.segments.last().unwrap().value().ident.as_ref()
+    fn type_name(&self) -> String {
+        self.this.segments.last().unwrap().value().ident.to_string()
     }
 }
 
 // All the generics in the input, plus a bound `T: Deserialize` for each generic
 // field type that will be deserialized by us, plus a bound `T: Default` for
 // each generic field type that will be set to a default value.
 fn build_generics(cont: &Container, borrowed: &BorrowedLifetimes) -> syn::Generics {
     let generics = bound::without_defaults(cont.generics);
@@ -203,17 +203,18 @@ fn build_generics(cont: &Container, borr
 // belong to a variant with a `skip_deserializing` or `deserialize_with`
 // attribute, are not deserialized by us so we do not generate a bound. Fields
 // with a `bound` attribute specify their own bound so we do not generate one.
 // All other fields may need a `T: Deserialize` bound where T is the type of the
 // field.
 fn needs_deserialize_bound(field: &attr::Field, variant: Option<&attr::Variant>) -> bool {
     !field.skip_deserializing() && field.deserialize_with().is_none() && field.de_bound().is_none()
         && variant.map_or(true, |variant| {
-            !variant.skip_deserializing() && variant.deserialize_with().is_none()
+            !variant.skip_deserializing()
+                && variant.deserialize_with().is_none()
                 && variant.de_bound().is_none()
         })
 }
 
 // Fields with a `default` attribute (not `default=...`), and fields with a
 // `skip_deserializing` attribute that do not also have `default=...`.
 fn requires_default(field: &attr::Field, _variant: Option<&attr::Variant>) -> bool {
     if let attr::Default::Default = *field.default() {
@@ -269,31 +270,27 @@ fn borrowed_lifetimes(cont: &Container) 
     if lifetimes.iter().any(|b| b.to_string() == "'static") {
         BorrowedLifetimes::Static
     } else {
         BorrowedLifetimes::Borrowed(lifetimes)
     }
 }
 
 fn deserialize_body(cont: &Container, params: &Parameters) -> Fragment {
-    if let Some(type_from) = cont.attrs.type_from() {
+    if cont.attrs.transparent() {
+        deserialize_transparent(cont, params)
+    } else if let Some(type_from) = cont.attrs.type_from() {
         deserialize_from(type_from)
     } else if let attr::Identifier::No = cont.attrs.identifier() {
         match cont.data {
             Data::Enum(_, ref variants) => deserialize_enum(params, variants, &cont.attrs),
             Data::Struct(Style::Struct, ref fields) => {
-                if fields.iter().any(|field| field.ident.is_none()) {
-                    panic!("struct has unnamed fields");
-                }
                 deserialize_struct(None, params, fields, &cont.attrs, None, &Untagged::No)
             }
             Data::Struct(Style::Tuple, ref fields) | Data::Struct(Style::Newtype, ref fields) => {
-                if fields.iter().any(|field| field.ident.is_some()) {
-                    panic!("tuple struct has named fields");
-                }
                 deserialize_tuple(None, params, fields, &cont.attrs, None)
             }
             Data::Struct(Style::Unit, _) => deserialize_unit_struct(params, &cont.attrs),
         }
     } else {
         match cont.data {
             Data::Enum(_, ref variants) => {
                 deserialize_custom_identifier(params, variants, &cont.attrs)
@@ -304,18 +301,21 @@ fn deserialize_body(cont: &Container, pa
 }
 
 #[cfg(feature = "deserialize_in_place")]
 fn deserialize_in_place_body(cont: &Container, params: &Parameters) -> Option<Stmts> {
     // Only remote derives have getters, and we do not generate
     // deserialize_in_place for remote derives.
     assert!(!params.has_getter);
 
-    if cont.attrs.type_from().is_some() || cont.attrs.identifier().is_some()
-        || cont.data
+    if cont.attrs.transparent()
+        || cont.attrs.type_from().is_some()
+        || cont.attrs.identifier().is_some()
+        || cont
+            .data
             .all_fields()
             .all(|f| f.attrs.deserialize_with().is_some())
     {
         return None;
     }
 
     let code = match cont.data {
         Data::Struct(Style::Struct, ref fields) => {
@@ -356,16 +356,51 @@ fn deserialize_in_place_body(cont: &Cont
     Some(Stmts(fn_deserialize_in_place))
 }
 
 #[cfg(not(feature = "deserialize_in_place"))]
 fn deserialize_in_place_body(_cont: &Container, _params: &Parameters) -> Option<Stmts> {
     None
 }
 
+fn deserialize_transparent(cont: &Container, params: &Parameters) -> Fragment {
+    let fields = match cont.data {
+        Data::Struct(_, ref fields) => fields,
+        Data::Enum(_, _) => unreachable!(),
+    };
+
+    let this = &params.this;
+    let transparent_field = fields.iter().find(|f| f.attrs.transparent()).unwrap();
+
+    let path = match transparent_field.attrs.deserialize_with() {
+        Some(path) => quote!(#path),
+        None => quote!(_serde::Deserialize::deserialize),
+    };
+
+    let assign = fields.iter().map(|field| {
+        let member = &field.member;
+        if field as *const Field == transparent_field as *const Field {
+            quote!(#member: __transparent)
+        } else {
+            let value = match *field.attrs.default() {
+                attr::Default::Default => quote!(_serde::export::Default::default()),
+                attr::Default::Path(ref path) => quote!(#path()),
+                attr::Default::None => quote!(_serde::export::PhantomData),
+            };
+            quote!(#member: #value)
+        }
+    });
+
+    quote_block! {
+        _serde::export::Result::map(
+            #path(__deserializer),
+            |__transparent| #this { #(#assign),* })
+    }
+}
+
 fn deserialize_from(type_from: &syn::Type) -> Fragment {
     quote_block! {
         _serde::export::Result::map(
             <#type_from as _serde::Deserialize>::deserialize(__deserializer),
             _serde::export::From::from)
     }
 }
 
@@ -394,42 +429,42 @@ fn deserialize_unit_struct(params: &Para
             }
         }
 
         _serde::Deserializer::deserialize_unit_struct(__deserializer, #type_name, __Visitor)
     }
 }
 
 fn deserialize_tuple(
-    variant_ident: Option<syn::Ident>,
+    variant_ident: Option<&syn::Ident>,
     params: &Parameters,
     fields: &[Field],
     cattrs: &attr::Container,
-    deserializer: Option<Tokens>,
+    deserializer: Option<TokenStream>,
 ) -> Fragment {
     let this = &params.this;
     let (de_impl_generics, de_ty_generics, ty_generics, where_clause) =
         split_with_de_lifetime(params);
     let delife = params.borrowed.de_lifetime();
 
     assert!(!cattrs.has_flatten());
 
     // If there are getters (implying private fields), construct the local type
     // and use an `Into` conversion to get the remote type. If there are no
     // getters then construct the target type directly.
     let construct = if params.has_getter {
-        let local = params.local;
+        let local = &params.local;
         quote!(#local)
     } else {
         quote!(#this)
     };
 
     let is_enum = variant_ident.is_some();
     let type_path = match variant_ident {
-        Some(variant_ident) => quote!(#construct::#variant_ident),
+        Some(ref variant_ident) => quote!(#construct::#variant_ident),
         None => construct,
     };
     let expecting = match variant_ident {
         Some(variant_ident) => format!("tuple variant {}::{}", params.type_name(), variant_ident),
         None => format!("tuple struct {}", params.type_name()),
     };
 
     let nfields = fields.len();
@@ -498,17 +533,17 @@ fn deserialize_tuple(
 }
 
 #[cfg(feature = "deserialize_in_place")]
 fn deserialize_tuple_in_place(
     variant_ident: Option<syn::Ident>,
     params: &Parameters,
     fields: &[Field],
     cattrs: &attr::Container,
-    deserializer: Option<Tokens>,
+    deserializer: Option<TokenStream>,
 ) -> Fragment {
     let this = &params.this;
     let (de_impl_generics, de_ty_generics, ty_generics, where_clause) =
         split_with_de_lifetime(params);
     let delife = params.borrowed.de_lifetime();
 
     assert!(!cattrs.has_flatten());
 
@@ -591,27 +626,25 @@ fn deserialize_tuple_in_place(
             }
         }
 
         #dispatch
     }
 }
 
 fn deserialize_seq(
-    type_path: &Tokens,
+    type_path: &TokenStream,
     params: &Parameters,
     fields: &[Field],
     is_struct: bool,
     cattrs: &attr::Container,
     expecting: &str,
 ) -> Fragment {
     let vars = (0..fields.len()).map(field_i as fn(_) -> _);
 
-    // XXX: do we need an error for flattening here?
-
     let deserialized_count = fields
         .iter()
         .filter(|field| !field.attrs.skip_deserializing())
         .count();
     let expecting = if deserialized_count == 1 {
         format!("{} with 1 element", expecting)
     } else {
         format!("{} with {} elements", expecting, deserialized_count)
@@ -652,17 +685,17 @@ fn deserialize_seq(
                 };
             };
             index_in_seq += 1;
             assign
         }
     });
 
     let mut result = if is_struct {
-        let names = fields.iter().map(|f| &f.ident);
+        let names = fields.iter().map(|f| &f.member);
         quote! {
             #type_path { #( #names: #vars ),* }
         }
     } else {
         quote! {
             #type_path ( #(#vars),* )
         }
     };
@@ -694,93 +727,73 @@ fn deserialize_seq(
         _serde::export::Ok(#result)
     }
 }
 
 #[cfg(feature = "deserialize_in_place")]
 fn deserialize_seq_in_place(
     params: &Parameters,
     fields: &[Field],
-    increment_fields: bool,
+    _increment_fields: bool,  // FIXME: Not needed anymore?
     cattrs: &attr::Container,
     expecting: &str,
 ) -> Fragment {
     // For enums there's a secret `tag` field at the start which we shouldn't touch,
-    // so we need to bump all the indices up by 1.
-    let index_names = if increment_fields {
-        (0..fields.len())
-    } else {
-        (1..fields.len() + 1)
-    };
-    let vars = index_names.map(field_i as fn(_) -> _);
-
-    // XXX: do we need an error for flattening here?
-
     let deserialized_count = fields
         .iter()
         .filter(|field| !field.attrs.skip_deserializing())
         .count();
     let expecting = if deserialized_count == 1 {
         format!("{} with 1 element", expecting)
     } else {
         format!("{} with {} elements", expecting, deserialized_count)
     };
 
     let mut index_in_seq = 0usize;
-    let write_values = vars.clone()
-        .zip(fields)
-        .enumerate()
-        .map(|(field_index, (_, field))| {
-            // If there's no field name, assume we're a tuple-struct and use a numeric index
-            let field_name = field.ident.map(Member::Named).unwrap_or_else(|| {
-                Member::Unnamed(Index {
-                    index: field_index as u32,
-                    span: Span::call_site(),
-                })
-            });
-
-            if field.attrs.skip_deserializing() {
-                let default = Expr(expr_is_missing(field, cattrs));
-                quote! {
-                    self.place.#field_name = #default;
-                }
-            } else {
-                let return_invalid_length = quote! {
-                    return _serde::export::Err(_serde::de::Error::invalid_length(#index_in_seq, &#expecting));
-                };
-                let write = match field.attrs.deserialize_with() {
-                    None => {
-                        quote! {
-                            if let _serde::export::None = try!(_serde::de::SeqAccess::next_element_seed(&mut __seq,
-                                _serde::private::de::InPlaceSeed(&mut self.place.#field_name)))
-                            {
-                                #return_invalid_length
-                            }
+    let write_values = fields.iter().map(|field| {
+        let member = &field.member;
+
+        if field.attrs.skip_deserializing() {
+            let default = Expr(expr_is_missing(field, cattrs));
+            quote! {
+                self.place.#member = #default;
+            }
+        } else {
+            let return_invalid_length = quote! {
+                return _serde::export::Err(_serde::de::Error::invalid_length(#index_in_seq, &#expecting));
+            };
+            let write = match field.attrs.deserialize_with() {
+                None => {
+                    quote! {
+                        if let _serde::export::None = try!(_serde::de::SeqAccess::next_element_seed(&mut __seq,
+                            _serde::private::de::InPlaceSeed(&mut self.place.#member)))
+                        {
+                            #return_invalid_length
                         }
                     }
-                    Some(path) => {
-                        let (wrapper, wrapper_ty) =
-                            wrap_deserialize_field_with(params, field.ty, path);
-                        quote!({
+                }
+                Some(path) => {
+                    let (wrapper, wrapper_ty) = wrap_deserialize_field_with(params, field.ty, path);
+                    quote!({
                             #wrapper
                             match try!(_serde::de::SeqAccess::next_element::<#wrapper_ty>(&mut __seq)) {
                                 _serde::export::Some(__wrap) => {
-                                    self.place.#field_name = __wrap.value;
+                                    self.place.#member = __wrap.value;
                                 }
                                 _serde::export::None => {
                                     #return_invalid_length
                                 }
                             }
                         })
-                    }
-                };
-                index_in_seq += 1;
-                write
-            }
-        });
+                }
+            };
+            index_in_seq += 1;
+            write
+        }
+    });
 
     let this = &params.this;
     let (_, ty_generics, _) = params.generics.split_for_impl();
     let let_default = match *cattrs.default() {
         attr::Default::Default => Some(quote!(
             let __default: #this #ty_generics  = _serde::export::Default::default();
         )),
         attr::Default::Path(ref path) => Some(quote!(
@@ -795,17 +808,21 @@ fn deserialize_seq_in_place(
 
     quote_block! {
         #let_default
         #(#write_values)*
         _serde::export::Ok(())
     }
 }
 
-fn deserialize_newtype_struct(type_path: &Tokens, params: &Parameters, field: &Field) -> Tokens {
+fn deserialize_newtype_struct(
+    type_path: &TokenStream,
+    params: &Parameters,
+    field: &Field,
+) -> TokenStream {
     let delife = params.borrowed.de_lifetime();
     let field_ty = field.ty;
 
     let value = match field.attrs.deserialize_with() {
         None => {
             quote! {
                 try!(<#field_ty as _serde::Deserialize>::deserialize(__e))
             }
@@ -833,17 +850,17 @@ fn deserialize_newtype_struct(type_path:
         {
             let __field0: #field_ty = #value;
             _serde::export::Ok(#result)
         }
     }
 }
 
 #[cfg(feature = "deserialize_in_place")]
-fn deserialize_newtype_struct_in_place(params: &Parameters, field: &Field) -> Tokens {
+fn deserialize_newtype_struct_in_place(params: &Parameters, field: &Field) -> TokenStream {
     // We do not generate deserialize_in_place if every field has a deserialize_with.
     assert!(field.attrs.deserialize_with().is_none());
 
     let delife = params.borrowed.de_lifetime();
 
     quote! {
         #[inline]
         fn visit_newtype_struct<__E>(self, __e: __E) -> _serde::export::Result<Self::Value, __E::Error>
@@ -856,42 +873,42 @@ fn deserialize_newtype_struct_in_place(p
 }
 
 enum Untagged {
     Yes,
     No,
 }
 
 fn deserialize_struct(
-    variant_ident: Option<syn::Ident>,
+    variant_ident: Option<&syn::Ident>,
     params: &Parameters,
     fields: &[Field],
     cattrs: &attr::Container,
-    deserializer: Option<Tokens>,
+    deserializer: Option<TokenStream>,
     untagged: &Untagged,
 ) -> Fragment {
     let is_enum = variant_ident.is_some();
 
     let this = &params.this;
     let (de_impl_generics, de_ty_generics, ty_generics, where_clause) =
         split_with_de_lifetime(params);
     let delife = params.borrowed.de_lifetime();
 
     // If there are getters (implying private fields), construct the local type
     // and use an `Into` conversion to get the remote type. If there are no
     // getters then construct the target type directly.
     let construct = if params.has_getter {
-        let local = params.local;
+        let local = &params.local;
         quote!(#local)
     } else {
         quote!(#this)
     };
 
     let type_path = match variant_ident {
-        Some(variant_ident) => quote!(#construct::#variant_ident),
+        Some(ref variant_ident) => quote!(#construct::#variant_ident),
         None => construct,
     };
     let expecting = match variant_ident {
         Some(variant_ident) => format!("struct variant {}::{}", params.type_name(), variant_ident),
         None => format!("struct {}", params.type_name()),
     };
 
     let visit_seq = Stmts(deserialize_seq(
@@ -1010,17 +1027,17 @@ fn deserialize_struct(
 }
 
 #[cfg(feature = "deserialize_in_place")]
 fn deserialize_struct_in_place(
     variant_ident: Option<syn::Ident>,
     params: &Parameters,
     fields: &[Field],
     cattrs: &attr::Container,
-    deserializer: Option<Tokens>,
+    deserializer: Option<TokenStream>,
 ) -> Option<Fragment> {
     let is_enum = variant_ident.is_some();
 
     // for now we do not support in_place deserialization for structs that
     // are represented as map.
     if cattrs.has_flatten() {
         return None;
     }
@@ -1258,17 +1275,17 @@ fn deserialize_externally_tagged_enum(
 #[cfg(feature = "deserialize_in_place")]
 fn deserialize_externally_tagged_enum_in_place(
     params: &Parameters,
     repr: &Repr,
     variants: &[Variant],
     cattrs: &attr::Container,
 ) -> Option<Fragment> {
     let int_repr = repr.get_stable_rust_enum_layout().map(|int_repr| {
-        let int_repr = Ident::from(int_repr);
+        let int_repr = Ident::new(int_repr, Span::call_site());
         quote!(#[repr(#int_repr)])
     });
 
     let unit_variant = variants.iter().position(|variant| is_unit(variant));
     let non_unit_variant = variants.iter().enumerate().find(|&(_, variant)| !is_unit(variant));
 
     // We need an int_repr, unit variant, and a non-unit variant to proceed
     if int_repr.is_none() || unit_variant.is_none() || non_unit_variant.is_none() {
@@ -1648,17 +1665,17 @@ fn deserialize_adjacently_tagged_enum(
             })
         };
         let arms = variants
             .iter()
             .enumerate()
             .filter(|&(_, variant)| !variant.attrs.skip_deserializing() && is_unit(variant))
             .map(|(i, variant)| {
                 let variant_index = field_i(i);
-                let variant_ident = variant.ident;
+                let variant_ident = &variant.ident;
                 quote! {
                     __Field::#variant_index => _serde::export::Ok(#this::#variant_ident),
                 }
             });
         missing_content = quote! {
             match __field {
                 #(#arms)*
                 #fallthrough
@@ -1912,17 +1929,17 @@ fn deserialize_externally_tagged_variant
         let (wrapper, wrapper_ty, unwrap_fn) = wrap_deserialize_variant_with(params, variant, path);
         return quote_block! {
             #wrapper
             _serde::export::Result::map(
                 _serde::de::VariantAccess::newtype_variant::<#wrapper_ty>(__variant), #unwrap_fn)
         };
     }
 
-    let variant_ident = variant.ident;
+    let variant_ident = &variant.ident;
 
     match variant.style {
         Style::Unit => {
             let this = &params.this;
             quote_block! {
                 try!(_serde::de::VariantAccess::unit_variant(__variant));
                 _serde::export::Ok(#this::#variant_ident)
             }
@@ -1970,42 +1987,42 @@ fn deserialize_externally_tagged_variant
             quote_block! {
                 try!(_serde::de::VariantAccess::unit_variant(__variant));
             }
         }
         Style::Newtype => {
             deserialize_externally_tagged_newtype_variant_in_place(variant_ident, params, &variant.fields[0])
         }
         Style::Tuple => {
-            deserialize_tuple_in_place(Some(*variant_ident), params, &variant.fields, cattrs, None)
+            deserialize_tuple_in_place(Some(variant_ident.clone()), params, &variant.fields, cattrs, None)
         }
         Style::Struct => {
             unimplemented!()
             // deserialize_struct(None, params, &variant.fields, cattrs, None, Untagged::No).0
         }
     }
 }
 
 fn deserialize_internally_tagged_variant(
     params: &Parameters,
     variant: &Variant,
     cattrs: &attr::Container,
-    deserializer: Tokens,
+    deserializer: TokenStream,
 ) -> Fragment {
     if variant.attrs.deserialize_with().is_some() {
         return deserialize_untagged_variant(params, variant, cattrs, deserializer);
     }
 
-    let variant_ident = variant.ident;
+    let variant_ident = &variant.ident;
 
     match variant.style {
         Style::Unit => {
             let this = &params.this;
             let type_name = params.type_name();
-            let variant_name = variant.ident.as_ref();
+            let variant_name = variant.ident.to_string();
             quote_block! {
                 try!(_serde::Deserializer::deserialize_any(#deserializer, _serde::private::de::InternallyTaggedUnitVisitor::new(#type_name, #variant_name)));
                 _serde::export::Ok(#this::#variant_ident)
             }
         }
         Style::Newtype => deserialize_untagged_newtype_variant(
             variant_ident,
             params,
@@ -2023,34 +2040,34 @@ fn deserialize_internally_tagged_variant
         Style::Tuple => unreachable!("checked in serde_derive_internals"),
     }
 }
 
 fn deserialize_untagged_variant(
     params: &Parameters,
     variant: &Variant,
     cattrs: &attr::Container,
-    deserializer: Tokens,
+    deserializer: TokenStream,
 ) -> Fragment {
     if let Some(path) = variant.attrs.deserialize_with() {
         let (wrapper, wrapper_ty, unwrap_fn) = wrap_deserialize_variant_with(params, variant, path);
         return quote_block! {
             #wrapper
             _serde::export::Result::map(
                 <#wrapper_ty as _serde::Deserialize>::deserialize(#deserializer), #unwrap_fn)
         };
     }
 
-    let variant_ident = variant.ident;
+    let variant_ident = &variant.ident;
 
     match variant.style {
         Style::Unit => {
             let this = &params.this;
             let type_name = params.type_name();
-            let variant_name = variant.ident.as_ref();
+            let variant_name = variant.ident.to_string();
             quote_expr! {
                 match _serde::Deserializer::deserialize_any(
                     #deserializer,
                     _serde::private::de::UntaggedUnitVisitor::new(#type_name, #variant_name)
                 ) {
                     _serde::export::Ok(()) => _serde::export::Ok(#this::#variant_ident),
                     _serde::export::Err(__err) => _serde::export::Err(__err),
                 }
@@ -2076,17 +2093,17 @@ fn deserialize_untagged_variant(
             cattrs,
             Some(deserializer),
             &Untagged::Yes,
         ),
     }
 }
 
 fn deserialize_externally_tagged_newtype_variant(
-    variant_ident: syn::Ident,
+    variant_ident: &syn::Ident,
     params: &Parameters,
     field: &Field,
 ) -> Fragment {
     let this = &params.this;
     match field.attrs.deserialize_with() {
         None => {
             let field_ty = field.ty;
             quote_expr! {
@@ -2131,20 +2148,20 @@ fn deserialize_externally_tagged_newtype
                     |__wrapper| #this::#variant_ident(__wrapper.value))
             }
             */
         }
     }
 }
 
 fn deserialize_untagged_newtype_variant(
-    variant_ident: syn::Ident,
+    variant_ident: &syn::Ident,
     params: &Parameters,
     field: &Field,
-    deserializer: &Tokens,
+    deserializer: &TokenStream,
 ) -> Fragment {
     let this = &params.this;
     let field_ty = field.ty;
     match field.attrs.deserialize_with() {
         None => {
             quote_expr! {
                 _serde::export::Result::map(
                     <#field_ty as _serde::Deserialize>::deserialize(#deserializer),
@@ -2231,17 +2248,17 @@ fn deserialize_custom_identifier(
         attr::Identifier::Field => false,
         attr::Identifier::No => unreachable!(),
     };
 
     let this = &params.this;
     let this = quote!(#this);
 
     let (ordinary, fallthrough) = if let Some(last) = variants.last() {
-        let last_ident = last.ident;
+        let last_ident = &last.ident;
         if last.attrs.other() {
             let ordinary = &variants[..variants.len() - 1];
             let fallthrough = quote!(_serde::export::Ok(#this::#last_ident));
             (ordinary, Some(fallthrough))
         } else if let Style::Newtype = last.style {
             let ordinary = &variants[..variants.len() - 1];
             let deserializer = quote!(_serde::private::de::IdentifierDeserializer::from(__value));
             let fallthrough = quote! {
@@ -2254,17 +2271,22 @@ fn deserialize_custom_identifier(
             (variants, None)
         }
     } else {
         (variants, None)
     };
 
     let names_idents: Vec<_> = ordinary
         .iter()
-        .map(|variant| (variant.attrs.name().deserialize_name(), variant.ident))
+        .map(|variant| {
+            (
+                variant.attrs.name().deserialize_name(),
+                variant.ident.clone(),
+            )
+        })
         .collect();
 
     let names = names_idents.iter().map(|&(ref name, _)| name);
 
     let names_const = if fallthrough.is_some() {
         None
     } else if is_variant {
         let variants = quote! {
@@ -2307,20 +2329,20 @@ fn deserialize_custom_identifier(
             marker: _serde::export::PhantomData::<#this #ty_generics>,
             lifetime: _serde::export::PhantomData,
         };
         _serde::Deserializer::deserialize_identifier(__deserializer, __visitor)
     }
 }
 
 fn deserialize_identifier(
-    this: &Tokens,
+    this: &TokenStream,
     fields: &[(String, Ident)],
     is_variant: bool,
-    fallthrough: Option<Tokens>,
+    fallthrough: Option<TokenStream>,
     collect_other_fields: bool,
 ) -> Fragment {
     let field_strs = fields.iter().map(|&(ref name, _)| name);
     let field_borrowed_strs = fields.iter().map(|&(ref name, _)| name);
     let field_bytes = fields
         .iter()
         .map(|&(ref name, _)| Literal::byte_string(name.as_bytes()));
     let field_borrowed_bytes = fields
@@ -2564,17 +2586,17 @@ fn deserialize_identifier(
                     #fallthrough_arm
                 }
             }
         }
     }
 }
 
 fn deserialize_struct_as_struct_visitor(
-    struct_path: &Tokens,
+    struct_path: &TokenStream,
     params: &Parameters,
     fields: &[Field],
     cattrs: &attr::Container,
 ) -> (Fragment, Option<Fragment>, Fragment) {
     assert!(!cattrs.has_flatten());
 
     let field_names_idents: Vec<_> = fields
         .iter()
@@ -2593,17 +2615,17 @@ fn deserialize_struct_as_struct_visitor(
     let field_visitor = deserialize_generated_identifier(&field_names_idents, cattrs, false);
 
     let visit_map = deserialize_map(struct_path, params, fields, cattrs);
 
     (field_visitor, Some(fields_stmt), visit_map)
 }
 
 fn deserialize_struct_as_map_visitor(
-    struct_path: &Tokens,
+    struct_path: &TokenStream,
     params: &Parameters,
     fields: &[Field],
     cattrs: &attr::Container,
 ) -> (Fragment, Option<Fragment>, Fragment) {
     let field_names_idents: Vec<_> = fields
         .iter()
         .enumerate()
         .filter(|&(_, field)| !field.attrs.skip_deserializing() && !field.attrs.flatten())
@@ -2613,17 +2635,17 @@ fn deserialize_struct_as_map_visitor(
     let field_visitor = deserialize_generated_identifier(&field_names_idents, cattrs, false);
 
     let visit_map = deserialize_map(struct_path, params, fields, cattrs);
 
     (field_visitor, None, visit_map)
 }
 
 fn deserialize_map(
-    struct_path: &Tokens,
+    struct_path: &TokenStream,
     params: &Parameters,
     fields: &[Field],
     cattrs: &attr::Container,
 ) -> Fragment {
     // Create the field names for the fields.
     let fields_names: Vec<_> = fields
         .iter()
         .enumerate()
@@ -2768,22 +2790,22 @@ fn deserialize_map(
                 }
             }
         })
     } else {
         None
     };
 
     let result = fields_names.iter().map(|&(field, ref name)| {
-        let ident = field.ident.expect("struct contains unnamed fields");
+        let member = &field.member;
         if field.attrs.skip_deserializing() {
             let value = Expr(expr_is_missing(field, cattrs));
-            quote!(#ident: #value)
+            quote!(#member: #value)
         } else {
-            quote!(#ident: #name)
+            quote!(#member: #name)
         }
     });
 
     let let_default = match *cattrs.default() {
         attr::Default::Default => Some(quote!(
             let __default: Self::Value = _serde::export::Default::default();
         )),
         attr::Default::Path(ref path) => Some(quote!(
@@ -2878,29 +2900,29 @@ fn deserialize_map_in_place(
         });
 
     // Match arms to extract a value for a field.
     let value_arms_from = fields_names
         .iter()
         .filter(|&&(field, _)| !field.attrs.skip_deserializing())
         .map(|&(field, ref name)| {
             let deser_name = field.attrs.name().deserialize_name();
-            let field_name = field.ident;
+            let member = &field.member;
 
             let visit = match field.attrs.deserialize_with() {
                 None => {
                     quote! {
-                        try!(_serde::de::MapAccess::next_value_seed(&mut __map, _serde::private::de::InPlaceSeed(&mut self.place.#field_name)))
+                        try!(_serde::de::MapAccess::next_value_seed(&mut __map, _serde::private::de::InPlaceSeed(&mut self.place.#member)))
                     }
                 }
                 Some(path) => {
                     let (wrapper, wrapper_ty) = wrap_deserialize_field_with(params, field.ty, path);
                     quote!({
                         #wrapper
-                        self.place.#field_name = try!(_serde::de::MapAccess::next_value::<#wrapper_ty>(&mut __map)).value
+                        self.place.#member = try!(_serde::de::MapAccess::next_value::<#wrapper_ty>(&mut __map)).value
                     })
                 }
             };
             quote! {
                 __Field::#name => {
                     if #name {
                         return _serde::export::Err(<__A::Error as _serde::de::Error>::duplicate_field(#deser_name));
                     }
@@ -2942,31 +2964,32 @@ fn deserialize_map_in_place(
 
     let check_flags = fields_names
         .iter()
         .filter(|&&(field, _)| !field.attrs.skip_deserializing())
         .map(|&(field, ref name)| {
             let missing_expr = expr_is_missing(field, cattrs);
             // If missing_expr unconditionally returns an error, don't try
             // to assign its value to self.place.
-            if field.attrs.default().is_none() && cattrs.default().is_none()
+            if field.attrs.default().is_none()
+                && cattrs.default().is_none()
                 && field.attrs.deserialize_with().is_some()
             {
                 let missing_expr = Stmts(missing_expr);
                 quote! {
                     if !#name {
                         #missing_expr;
                     }
                 }
             } else {
-                let field_name = field.ident;
+                let member = &field.member;
                 let missing_expr = Expr(missing_expr);
                 quote! {
                     if !#name {
-                        self.place.#field_name = #missing_expr;
+                        self.place.#member = #missing_expr;
                     };
                 }
             }
         });
 
     let this = &params.this;
     let (_, _, ty_generics, _) = split_with_de_lifetime(params);
 
@@ -3000,19 +3023,19 @@ fn deserialize_map_in_place(
 fn field_i(i: usize) -> Ident {
     Ident::new(&format!("__field{}", i), Span::call_site())
 }
 
 /// This function wraps the expression in `#[serde(deserialize_with = "...")]`
 /// in a trait to prevent it from accessing the internal `Deserialize` state.
 fn wrap_deserialize_with(
     params: &Parameters,
-    value_ty: &Tokens,
+    value_ty: &TokenStream,
     deserialize_with: &syn::ExprPath,
-) -> (Tokens, Tokens) {
+) -> (TokenStream, TokenStream) {
     let this = &params.this;
     let (de_impl_generics, de_ty_generics, ty_generics, where_clause) =
         split_with_de_lifetime(params);
     let delife = params.borrowed.de_lifetime();
 
     let wrapper = quote! {
         struct __DeserializeWith #de_impl_generics #where_clause {
             value: #value_ty,
@@ -3038,52 +3061,49 @@ fn wrap_deserialize_with(
 
     (wrapper, wrapper_ty)
 }
 
 fn wrap_deserialize_field_with(
     params: &Parameters,
     field_ty: &syn::Type,
     deserialize_with: &syn::ExprPath,
-) -> (Tokens, Tokens) {
+) -> (TokenStream, TokenStream) {
     wrap_deserialize_with(params, &quote!(#field_ty), deserialize_with)
 }
 
 fn wrap_deserialize_variant_with(
     params: &Parameters,
     variant: &Variant,
     deserialize_with: &syn::ExprPath,
-) -> (Tokens, Tokens, Tokens) {
+) -> (TokenStream, TokenStream, TokenStream) {
     let this = &params.this;
-    let variant_ident = variant.ident;
+    let variant_ident = &variant.ident;
 
     let field_tys = variant.fields.iter().map(|field| field.ty);
     let (wrapper, wrapper_ty) =
         wrap_deserialize_with(params, &quote!((#(#field_tys),*)), deserialize_with);
 
     let field_access = (0..variant.fields.len()).map(|n| {
         Member::Unnamed(Index {
             index: n as u32,
             span: Span::call_site(),
         })
     });
     let unwrap_fn = match variant.style {
         Style::Struct if variant.fields.len() == 1 => {
-            let field_ident = variant.fields[0].ident.unwrap();
+            let member = &variant.fields[0].member;
             quote! {
-                |__wrap| #this::#variant_ident { #field_ident: __wrap.value }
+                |__wrap| #this::#variant_ident { #member: __wrap.value }
             }
         }
         Style::Struct => {
-            let field_idents = variant
-                .fields
-                .iter()
-                .map(|field| field.ident.as_ref().unwrap());
+            let members = variant.fields.iter().map(|field| &field.member);
             quote! {
-                |__wrap| #this::#variant_ident { #(#field_idents: __wrap.value.#field_access),* }
+                |__wrap| #this::#variant_ident { #(#members: __wrap.value.#field_access),* }
             }
         }
         Style::Tuple => quote! {
             |__wrap| #this::#variant_ident(#(__wrap.value.#field_access),*)
         },
         Style::Newtype => quote! {
             |__wrap| #this::#variant_ident(__wrap.value)
         },
@@ -3103,18 +3123,18 @@ fn expr_is_missing(field: &Field, cattrs
         attr::Default::Path(ref path) => {
             return quote_expr!(#path());
         }
         attr::Default::None => { /* below */ }
     }
 
     match *cattrs.default() {
         attr::Default::Default | attr::Default::Path(_) => {
-            let ident = field.ident;
-            return quote_expr!(__default.#ident);
+            let member = &field.member;
+            return quote_expr!(__default.#member);
         }
         attr::Default::None => { /* below */ }
     }
 
     let name = field.attrs.name().deserialize_name();
     match field.attrs.deserialize_with() {
         None => {
             let span = field.original.span();
@@ -3131,45 +3151,45 @@ fn expr_is_missing(field: &Field, cattrs
     }
 }
 
 struct DeImplGenerics<'a>(&'a Parameters);
 #[cfg(feature = "deserialize_in_place")]
 struct InPlaceImplGenerics<'a>(&'a Parameters);
 
 impl<'a> ToTokens for DeImplGenerics<'a> {
-    fn to_tokens(&self, tokens: &mut Tokens) {
+    fn to_tokens(&self, tokens: &mut TokenStream) {
         let mut generics = self.0.generics.clone();
         if let Some(de_lifetime) = self.0.borrowed.de_lifetime_def() {
             generics.params = Some(syn::GenericParam::Lifetime(de_lifetime))
                 .into_iter()
                 .chain(generics.params)
                 .collect();
         }
         let (impl_generics, _, _) = generics.split_for_impl();
         impl_generics.to_tokens(tokens);
     }
 }
 
 #[cfg(feature = "deserialize_in_place")]
 impl<'a> ToTokens for InPlaceImplGenerics<'a> {
-    fn to_tokens(&self, tokens: &mut Tokens) {
+    fn to_tokens(&self, tokens: &mut TokenStream) {
         let place_lifetime = place_lifetime();
         let mut generics = self.0.generics.clone();
 
         // Add lifetime for `&'place mut Self, and `'a: 'place`
         for param in &mut generics.params {
             match *param {
                 syn::GenericParam::Lifetime(ref mut param) => {
-                    param.bounds.push(place_lifetime.lifetime);
+                    param.bounds.push(place_lifetime.lifetime.clone());
                 }
                 syn::GenericParam::Type(ref mut param) => {
-                    param
-                        .bounds
-                        .push(syn::TypeParamBound::Lifetime(place_lifetime.lifetime));
+                    param.bounds.push(syn::TypeParamBound::Lifetime(
+                        place_lifetime.lifetime.clone(),
+                    ));
                 }
                 syn::GenericParam::Const(_) => {}
             }
         }
         generics.params = Some(syn::GenericParam::Lifetime(place_lifetime))
             .into_iter()
             .chain(generics.params)
             .collect();
@@ -3191,17 +3211,17 @@ impl<'a> DeImplGenerics<'a> {
     }
 }
 
 struct DeTypeGenerics<'a>(&'a Parameters);
 #[cfg(feature = "deserialize_in_place")]
 struct InPlaceTypeGenerics<'a>(&'a Parameters);
 
 impl<'a> ToTokens for DeTypeGenerics<'a> {
-    fn to_tokens(&self, tokens: &mut Tokens) {
+    fn to_tokens(&self, tokens: &mut TokenStream) {
         let mut generics = self.0.generics.clone();
         if self.0.borrowed.de_lifetime_def().is_some() {
             let def = syn::LifetimeDef {
                 attrs: Vec::new(),
                 lifetime: syn::Lifetime::new("'de", Span::call_site()),
                 colon_token: None,
                 bounds: Punctuated::new(),
             };
@@ -3212,17 +3232,17 @@ impl<'a> ToTokens for DeTypeGenerics<'a>
         }
         let (_, ty_generics, _) = generics.split_for_impl();
         ty_generics.to_tokens(tokens);
     }
 }
 
 #[cfg(feature = "deserialize_in_place")]
 impl<'a> ToTokens for InPlaceTypeGenerics<'a> {
-    fn to_tokens(&self, tokens: &mut Tokens) {
+    fn to_tokens(&self, tokens: &mut TokenStream) {
         let mut generics = self.0.generics.clone();
         generics.params = Some(syn::GenericParam::Lifetime(place_lifetime()))
             .into_iter()
             .chain(generics.params)
             .collect();
 
         if self.0.borrowed.de_lifetime_def().is_some() {
             let def = syn::LifetimeDef {
--- a/third_party/rust/serde_derive/src/fragment.rs
+++ b/third_party/rust/serde_derive/src/fragment.rs
@@ -1,25 +1,26 @@
 // Copyright 2017 Serde Developers
 //
 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use quote::{ToTokens, Tokens};
+use proc_macro2::TokenStream;
+use quote::ToTokens;
 use syn::token;
 
 pub enum Fragment {
     /// Tokens that can be used as an expression.
-    Expr(Tokens),
+    Expr(TokenStream),
     /// Tokens that can be used inside a block. The surrounding curly braces are
     /// not part of these tokens.
-    Block(Tokens),
+    Block(TokenStream),
 }
 
 macro_rules! quote_expr {
     ($($tt:tt)*) => {
         $crate::fragment::Fragment::Expr(quote!($($tt)*))
     }
 }
 
@@ -28,54 +29,54 @@ macro_rules! quote_block {
         $crate::fragment::Fragment::Block(quote!($($tt)*))
     }
 }
 
 /// Interpolate a fragment in place of an expression. This involves surrounding
 /// Block fragments in curly braces.
 pub struct Expr(pub Fragment);
 impl ToTokens for Expr {
-    fn to_tokens(&self, out: &mut Tokens) {
+    fn to_tokens(&self, out: &mut TokenStream) {
         match self.0 {
             Fragment::Expr(ref expr) => expr.to_tokens(out),
             Fragment::Block(ref block) => {
                 token::Brace::default().surround(out, |out| block.to_tokens(out));
             }
         }
     }
 }
 
 /// Interpolate a fragment as the statements of a block.
 pub struct Stmts(pub Fragment);
 impl ToTokens for Stmts {
-    fn to_tokens(&self, out: &mut Tokens) {
+    fn to_tokens(&self, out: &mut TokenStream) {
         match self.0 {
             Fragment::Expr(ref expr) => expr.to_tokens(out),
             Fragment::Block(ref block) => block.to_tokens(out),
         }
     }
 }
 
 /// Interpolate a fragment as the value part of a `match` expression. This
 /// involves putting a comma after expressions and curly braces around blocks.
 pub struct Match(pub Fragment);
 impl ToTokens for Match {
-    fn to_tokens(&self, out: &mut Tokens) {
+    fn to_tokens(&self, out: &mut TokenStream) {
         match self.0 {
             Fragment::Expr(ref expr) => {
                 expr.to_tokens(out);
                 <Token![,]>::default().to_tokens(out);
             }
             Fragment::Block(ref block) => {
                 token::Brace::default().surround(out, |out| block.to_tokens(out));
             }
         }
     }
 }
 
-impl AsRef<Tokens> for Fragment {
-    fn as_ref(&self) -> &Tokens {
+impl AsRef<TokenStream> for Fragment {
+    fn as_ref(&self) -> &TokenStream {
         match *self {
             Fragment::Expr(ref expr) => expr,
             Fragment::Block(ref block) => block,
         }
     }
 }
--- a/third_party/rust/serde_derive/src/internals/ast.rs
+++ b/third_party/rust/serde_derive/src/internals/ast.rs
@@ -3,17 +3,17 @@
 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
 use internals::attr;
 use internals::check;
-use internals::Ctxt;
+use internals::{Ctxt, Derive};
 use syn;
 use syn::punctuated::Punctuated;
 
 pub struct Container<'a> {
     pub ident: syn::Ident,
     pub attrs: attr::Container,
     pub data: Data<'a>,
     pub generics: &'a syn::Generics,
@@ -27,17 +27,17 @@ pub enum Data<'a> {
 pub struct Variant<'a> {
     pub ident: syn::Ident,
     pub attrs: attr::Variant,
     pub style: Style,
     pub fields: Vec<Field<'a>>,
 }
 
 pub struct Field<'a> {
-    pub ident: Option<syn::Ident>,
+    pub member: syn::Member,
     pub attrs: attr::Field,
     pub ty: &'a syn::Type,
     pub original: &'a syn::Field,
 }
 
 pub struct Repr {
     pub int_repr: Option<&'static str>,
     pub c_repr: bool,
@@ -48,17 +48,17 @@ pub struct Repr {
 pub enum Style {
     Struct,
     Tuple,
     Newtype,
     Unit,
 }
 
 impl<'a> Container<'a> {
-    pub fn from_ast(cx: &Ctxt, item: &'a syn::DeriveInput) -> Container<'a> {
+    pub fn from_ast(cx: &Ctxt, item: &'a syn::DeriveInput, derive: Derive) -> Container<'a> {
         let mut attrs = attr::Container::from_ast(cx, item);
 
         let mut data = match item.data {
             syn::Data::Enum(ref data) => {
                 let (repr, variants) = enum_from_ast(cx, item, &data.variants, attrs.default());
                 Data::Enum(repr, variants)
             }
             syn::Data::Struct(ref data) => {
@@ -88,23 +88,23 @@ impl<'a> Container<'a> {
                 field.attrs.rename_by_rule(attrs.rename_all());
             },
         }
 
         if has_flatten {
             attrs.mark_has_flatten();
         }
 
-        let item = Container {
-            ident: item.ident,
+        let mut item = Container {
+            ident: item.ident.clone(),
             attrs: attrs,
             data: data,
             generics: &item.generics,
         };
-        check::check(cx, &item);
+        check::check(cx, &mut item, derive);
         item
     }
 }
 
 impl<'a> Data<'a> {
     pub fn all_fields(&'a self) -> Box<Iterator<Item = &'a Field<'a>> + 'a> {
         match *self {
             Data::Enum(_, ref variants) => {
@@ -148,17 +148,17 @@ fn enum_from_ast<'a>(
     let variants = variants
         .iter()
         .map(
             |variant| {
                 let attrs = attr::Variant::from_ast(cx, variant);
                 let (style, fields) = 
                     struct_from_ast(cx, &variant.fields, Some(&attrs), container_default);
                 Variant {
-                    ident: variant.ident,
+                    ident: variant.ident.clone(),
                     attrs: attrs,
                     style: style,
                     fields: fields,
                 }
             },
         )
         .collect();
 
@@ -232,15 +232,18 @@ fn fields_from_ast<'a>(
     fields: &'a Punctuated<syn::Field, Token![,]>,
     attrs: Option<&attr::Variant>,
     container_default: &attr::Default,
 ) -> Vec<Field<'a>> {
     fields
         .iter()
         .enumerate()
         .map(|(i, field)| Field {
-            ident: field.ident,
+            member: match field.ident {
+                Some(ref ident) => syn::Member::Named(ident.clone()),
+                None => syn::Member::Unnamed(i.into()),
+            },
             attrs: attr::Field::from_ast(cx, i, field, attrs, container_default),
             ty: &field.ty,
             original: field,
         })
         .collect()
 }
--- a/third_party/rust/serde_derive/src/internals/attr.rs
+++ b/third_party/rust/serde_derive/src/internals/attr.rs
@@ -100,16 +100,17 @@ impl Name {
     pub fn deserialize_name(&self) -> String {
         self.deserialize.clone()
     }
 }
 
 /// Represents container (e.g. struct) attribute information
 pub struct Container {
     name: Name,
+    transparent: bool,
     deny_unknown_fields: bool,
     default: Default,
     rename_all: RenameRule,
     ser_bound: Option<Vec<syn::WherePredicate>>,
     de_bound: Option<Vec<syn::WherePredicate>>,
     tag: EnumTag,
     type_from: Option<syn::Type>,
     type_into: Option<syn::Type>,
@@ -176,16 +177,17 @@ impl Identifier {
     }
 }
 
 impl Container {
     /// Extract out the `#[serde(...)]` attributes from an item.
     pub fn from_ast(cx: &Ctxt, item: &syn::DeriveInput) -> Self {
         let mut ser_name = Attr::none(cx, "rename");
         let mut de_name = Attr::none(cx, "rename");
+        let mut transparent = BoolAttr::none(cx, "transparent");
         let mut deny_unknown_fields = BoolAttr::none(cx, "deny_unknown_fields");
         let mut default = Attr::none(cx, "default");
         let mut rename_all = Attr::none(cx, "rename_all");
         let mut ser_bound = Attr::none(cx, "bound");
         let mut de_bound = Attr::none(cx, "bound");
         let mut untagged = BoolAttr::none(cx, "untagged");
         let mut internal_tag = Attr::none(cx, "tag");
         let mut content = Attr::none(cx, "content");
@@ -195,66 +197,71 @@ impl Container {
         let mut field_identifier = BoolAttr::none(cx, "field_identifier");
         let mut variant_identifier = BoolAttr::none(cx, "variant_identifier");
 
         for meta_items in item.attrs.iter().filter_map(get_serde_meta_items) {
             for meta_item in meta_items {
                 match meta_item {
                     // Parse `#[serde(rename = "foo")]`
                     Meta(NameValue(ref m)) if m.ident == "rename" => {
-                        if let Ok(s) = get_lit_str(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit) {
+                        if let Ok(s) = get_lit_str(cx, &m.ident, &m.ident, &m.lit) {
                             ser_name.set(s.value());
                             de_name.set(s.value());
                         }
                     }
 
                     // Parse `#[serde(rename(serialize = "foo", deserialize = "bar"))]`
                     Meta(List(ref m)) if m.ident == "rename" => {
                         if let Ok((ser, de)) = get_renames(cx, &m.nested) {
                             ser_name.set_opt(ser.map(syn::LitStr::value));
                             de_name.set_opt(de.map(syn::LitStr::value));
                         }
                     }
 
                     // Parse `#[serde(rename_all = "foo")]`
                     Meta(NameValue(ref m)) if m.ident == "rename_all" => {
-                        if let Ok(s) = get_lit_str(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit) {
+                        if let Ok(s) = get_lit_str(cx, &m.ident, &m.ident, &m.lit) {
                             match RenameRule::from_str(&s.value()) {
                                 Ok(rename_rule) => rename_all.set(rename_rule),
                                 Err(()) => cx.error(format!(
                                     "unknown rename rule for #[serde(rename_all \
                                      = {:?})]",
                                     s.value()
                                 )),
                             }
                         }
                     }
 
+                    // Parse `#[serde(transparent)]`
+                    Meta(Word(ref word)) if word == "transparent" => {
+                        transparent.set_true();
+                    }
+
                     // Parse `#[serde(deny_unknown_fields)]`
-                    Meta(Word(word)) if word == "deny_unknown_fields" => {
+                    Meta(Word(ref word)) if word == "deny_unknown_fields" => {
                         deny_unknown_fields.set_true();
                     }
 
                     // Parse `#[serde(default)]`
-                    Meta(Word(word)) if word == "default" => match item.data {
+                    Meta(Word(ref word)) if word == "default" => match item.data {
                         syn::Data::Struct(syn::DataStruct {
                             fields: syn::Fields::Named(_),
                             ..
                         }) => {
                             default.set(Default::Default);
                         }
                         _ => cx.error(
                             "#[serde(default)] can only be used on structs \
                              with named fields",
                         ),
                     },
 
                     // Parse `#[serde(default = "...")]`
                     Meta(NameValue(ref m)) if m.ident == "default" => {
-                        if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) {
+                        if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, &m.lit) {
                             match item.data {
                                 syn::Data::Struct(syn::DataStruct {
                                     fields: syn::Fields::Named(_),
                                     ..
                                 }) => {
                                     default.set(Default::Path(path));
                                 }
                                 _ => cx.error(
@@ -263,102 +270,102 @@ impl Container {
                                 ),
                             }
                         }
                     }
 
                     // Parse `#[serde(bound = "D: Serialize")]`
                     Meta(NameValue(ref m)) if m.ident == "bound" => {
                         if let Ok(where_predicates) =
-                            parse_lit_into_where(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit)
+                            parse_lit_into_where(cx, &m.ident, &m.ident, &m.lit)
                         {
                             ser_bound.set(where_predicates.clone());
                             de_bound.set(where_predicates);
                         }
                     }
 
                     // Parse `#[serde(bound(serialize = "D: Serialize", deserialize = "D: Deserialize"))]`
                     Meta(List(ref m)) if m.ident == "bound" => {
                         if let Ok((ser, de)) = get_where_predicates(cx, &m.nested) {
                             ser_bound.set_opt(ser);
                             de_bound.set_opt(de);
                         }
                     }
 
                     // Parse `#[serde(untagged)]`
-                    Meta(Word(word)) if word == "untagged" => match item.data {
+                    Meta(Word(ref word)) if word == "untagged" => match item.data {
                         syn::Data::Enum(_) => {
                             untagged.set_true();
                         }
                         syn::Data::Struct(_) | syn::Data::Union(_) => {
                             cx.error("#[serde(untagged)] can only be used on enums")
                         }
                     },
 
                     // Parse `#[serde(tag = "type")]`
                     Meta(NameValue(ref m)) if m.ident == "tag" => {
-                        if let Ok(s) = get_lit_str(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit) {
+                        if let Ok(s) = get_lit_str(cx, &m.ident, &m.ident, &m.lit) {
                             match item.data {
                                 syn::Data::Enum(_) => {
                                     internal_tag.set(s.value());
                                 }
                                 syn::Data::Struct(_) | syn::Data::Union(_) => {
                                     cx.error("#[serde(tag = \"...\")] can only be used on enums")
                                 }
                             }
                         }
                     }
 
                     // Parse `#[serde(content = "c")]`
                     Meta(NameValue(ref m)) if m.ident == "content" => {
-                        if let Ok(s) = get_lit_str(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit) {
+                        if let Ok(s) = get_lit_str(cx, &m.ident, &m.ident, &m.lit) {
                             match item.data {
                                 syn::Data::Enum(_) => {
                                     content.set(s.value());
                                 }
                                 syn::Data::Struct(_) | syn::Data::Union(_) => cx.error(
                                     "#[serde(content = \"...\")] can only be used on \
                                      enums",
                                 ),
                             }
                         }
                     }
 
                     // Parse `#[serde(from = "Type")]
                     Meta(NameValue(ref m)) if m.ident == "from" => {
-                        if let Ok(from_ty) = parse_lit_into_ty(cx, m.ident.as_ref(), &m.lit) {
+                        if let Ok(from_ty) = parse_lit_into_ty(cx, &m.ident, &m.lit) {
                             type_from.set_opt(Some(from_ty));
                         }
                     }
 
                     // Parse `#[serde(into = "Type")]
                     Meta(NameValue(ref m)) if m.ident == "into" => {
-                        if let Ok(into_ty) = parse_lit_into_ty(cx, m.ident.as_ref(), &m.lit) {
+                        if let Ok(into_ty) = parse_lit_into_ty(cx, &m.ident, &m.lit) {
                             type_into.set_opt(Some(into_ty));
                         }
                     }
 
                     // Parse `#[serde(remote = "...")]`
                     Meta(NameValue(ref m)) if m.ident == "remote" => {
-                        if let Ok(path) = parse_lit_into_path(cx, m.ident.as_ref(), &m.lit) {
+                        if let Ok(path) = parse_lit_into_path(cx, &m.ident, &m.lit) {
                             if is_primitive_path(&path, "Self") {
-                                remote.set(item.ident.into());
+                                remote.set(item.ident.clone().into());
                             } else {
                                 remote.set(path);
                             }
                         }
                     }
 
                     // Parse `#[serde(field_identifier)]`
-                    Meta(Word(word)) if word == "field_identifier" => {
+                    Meta(Word(ref word)) if word == "field_identifier" => {
                         field_identifier.set_true();
                     }
 
                     // Parse `#[serde(variant_identifier)]`
-                    Meta(Word(word)) if word == "variant_identifier" => {
+                    Meta(Word(ref word)) if word == "variant_identifier" => {
                         variant_identifier.set_true();
                     }
 
                     Meta(ref meta_item) => {
                         cx.error(format!(
                             "unknown serde container attribute `{}`",
                             meta_item.name()
                         ));
@@ -371,16 +378,17 @@ impl Container {
             }
         }
 
         Container {
             name: Name {
                 serialize: ser_name.get().unwrap_or_else(|| item.ident.to_string()),
                 deserialize: de_name.get().unwrap_or_else(|| item.ident.to_string()),
             },
+            transparent: transparent.get(),
             deny_unknown_fields: deny_unknown_fields.get(),
             default: default.get().unwrap_or(Default::None),
             rename_all: rename_all.get().unwrap_or(RenameRule::None),
             ser_bound: ser_bound.get(),
             de_bound: de_bound.get(),
             tag: decide_tag(cx, item, &untagged, internal_tag, content),
             type_from: type_from.get(),
             type_into: type_into.get(),
@@ -393,16 +401,20 @@ impl Container {
     pub fn name(&self) -> &Name {
         &self.name
     }
 
     pub fn rename_all(&self) -> &RenameRule {
         &self.rename_all
     }
 
+    pub fn transparent(&self) -> bool {
+        self.transparent
+    }
+
     pub fn deny_unknown_fields(&self) -> bool {
         self.deny_unknown_fields
     }
 
     pub fn default(&self) -> &Default {
         &self.default
     }
 
@@ -551,86 +563,86 @@ impl Variant {
         let mut deserialize_with = Attr::none(cx, "deserialize_with");
         let mut borrow = Attr::none(cx, "borrow");
 
         for meta_items in variant.attrs.iter().filter_map(get_serde_meta_items) {
             for meta_item in meta_items {
                 match meta_item {
                     // Parse `#[serde(rename = "foo")]`
                     Meta(NameValue(ref m)) if m.ident == "rename" => {
-                        if let Ok(s) = get_lit_str(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit) {
+                        if let Ok(s) = get_lit_str(cx, &m.ident, &m.ident, &m.lit) {
                             ser_name.set(s.value());
                             de_name.set(s.value());
                         }
                     }
 
                     // Parse `#[serde(rename(serialize = "foo", deserialize = "bar"))]`
                     Meta(List(ref m)) if m.ident == "rename" => {
                         if let Ok((ser, de)) = get_renames(cx, &m.nested) {
                             ser_name.set_opt(ser.map(syn::LitStr::value));
                             de_name.set_opt(de.map(syn::LitStr::value));
                         }
                     }
 
                     // Parse `#[serde(rename_all = "foo")]`
                     Meta(NameValue(ref m)) if m.ident == "rename_all" => {
-                        if let Ok(s) = get_lit_str(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit) {
+                        if let Ok(s) = get_lit_str(cx, &m.ident, &m.ident, &m.lit) {
                             match RenameRule::from_str(&s.value()) {
                                 Ok(rename_rule) => rename_all.set(rename_rule),
                                 Err(()) => cx.error(format!(
                                     "unknown rename rule for #[serde(rename_all \
                                      = {:?})]",
                                     s.value()
                                 )),
                             }
                         }
                     }
 
                     // Parse `#[serde(skip)]`
-                    Meta(Word(word)) if word == "skip" => {
+                    Meta(Word(ref word)) if word == "skip" => {
                         skip_serializing.set_true();
                         skip_deserializing.set_true();
                     }
 
                     // Parse `#[serde(skip_deserializing)]`
-                    Meta(Word(word)) if word == "skip_deserializing" => {
+                    Meta(Word(ref word)) if word == "skip_deserializing" => {
                         skip_deserializing.set_true();
                     }
 
                     // Parse `#[serde(skip_serializing)]`
-                    Meta(Word(word)) if word == "skip_serializing" => {
+                    Meta(Word(ref word)) if word == "skip_serializing" => {
                         skip_serializing.set_true();
                     }
 
                     // Parse `#[serde(other)]`
-                    Meta(Word(word)) if word == "other" => {
+                    Meta(Word(ref word)) if word == "other" => {
                         other.set_true();
                     }
 
                     // Parse `#[serde(bound = "D: Serialize")]`
                     Meta(NameValue(ref m)) if m.ident == "bound" => {
                         if let Ok(where_predicates) =
-                            parse_lit_into_where(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit)
+                            parse_lit_into_where(cx, &m.ident, &m.ident, &m.lit)
                         {
                             ser_bound.set(where_predicates.clone());
                             de_bound.set(where_predicates);
                         }
                     }
 
                     // Parse `#[serde(bound(serialize = "D: Serialize", deserialize = "D: Deserialize"))]`
                     Meta(List(ref m)) if m.ident == "bound" => {
                         if let Ok((ser, de)) = get_where_predicates(cx, &m.nested) {
                             ser_bound.set_opt(ser);
                             de_bound.set_opt(de);
                         }
                     }
 
                     // Parse `#[serde(with = "...")]`
                     Meta(NameValue(ref m)) if m.ident == "with" => {
-                        if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) {
+                        if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, &m.lit) {
                             let mut ser_path = path.clone();
                             ser_path
                                 .path
                                 .segments
                                 .push(Ident::new("serialize", Span::call_site()).into());
                             serialize_with.set(ser_path);
                             let mut de_path = path;
                             de_path
@@ -638,24 +650,24 @@ impl Variant {
                                 .segments
                                 .push(Ident::new("deserialize", Span::call_site()).into());
                             deserialize_with.set(de_path);
                         }
                     }
 
                     // Parse `#[serde(serialize_with = "...")]`
                     Meta(NameValue(ref m)) if m.ident == "serialize_with" => {
-                        if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) {
+                        if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, &m.lit) {
                             serialize_with.set(path);
                         }
                     }
 
                     // Parse `#[serde(deserialize_with = "...")]`
                     Meta(NameValue(ref m)) if m.ident == "deserialize_with" => {
-                        if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) {
+                        if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, &m.lit) {
                             deserialize_with.set(path);
                         }
                     }
 
                     // Defer `#[serde(borrow)]` and `#[serde(borrow = "'a + 'b")]`
                     Meta(ref m) if m.name() == "borrow" => match variant.fields {
                         syn::Fields::Unnamed(ref fields) if fields.unnamed.len() == 1 => {
                             borrow.set(m.clone());
@@ -759,30 +771,30 @@ pub struct Field {
     default: Default,
     serialize_with: Option<syn::ExprPath>,
     deserialize_with: Option<syn::ExprPath>,
     ser_bound: Option<Vec<syn::WherePredicate>>,
     de_bound: Option<Vec<syn::WherePredicate>>,
     borrowed_lifetimes: BTreeSet<syn::Lifetime>,
     getter: Option<syn::ExprPath>,
     flatten: bool,
+    transparent: bool,
 }
 
 /// Represents the default to use for a field when deserializing.
 pub enum Default {
     /// Field must always be specified because it does not have a default.
     None,
     /// The default is given by `std::default::Default::default()`.
     Default,
     /// The default is given by this function.
     Path(syn::ExprPath),
 }
 
 impl Default {
-    #[cfg(feature = "deserialize_in_place")]
     pub fn is_none(&self) -> bool {
         match *self {
             Default::None => true,
             Default::Default | Default::Path(_) => false,
         }
     }
 }
 
@@ -823,82 +835,82 @@ impl Field {
             .iter()
             .filter_map(get_serde_meta_items)
             .chain(variant_borrow)
         {
             for meta_item in meta_items {
                 match meta_item {
                     // Parse `#[serde(rename = "foo")]`
                     Meta(NameValue(ref m)) if m.ident == "rename" => {
-                        if let Ok(s) = get_lit_str(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit) {
+                        if let Ok(s) = get_lit_str(cx, &m.ident, &m.ident, &m.lit) {
                             ser_name.set(s.value());
                             de_name.set(s.value());
                         }
                     }
 
                     // Parse `#[serde(rename(serialize = "foo", deserialize = "bar"))]`
                     Meta(List(ref m)) if m.ident == "rename" => {
                         if let Ok((ser, de)) = get_renames(cx, &m.nested) {
                             ser_name.set_opt(ser.map(syn::LitStr::value));
                             de_name.set_opt(de.map(syn::LitStr::value));
                         }
                     }
 
                     // Parse `#[serde(default)]`
-                    Meta(Word(word)) if word == "default" => {
+                    Meta(Word(ref word)) if word == "default" => {
                         default.set(Default::Default);
                     }
 
                     // Parse `#[serde(default = "...")]`
                     Meta(NameValue(ref m)) if m.ident == "default" => {
-                        if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) {
+                        if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, &m.lit) {
                             default.set(Default::Path(path));
                         }
                     }
 
                     // Parse `#[serde(skip_serializing)]`
-                    Meta(Word(word)) if word == "skip_serializing" => {
+                    Meta(Word(ref word)) if word == "skip_serializing" => {
                         skip_serializing.set_true();
                     }
 
                     // Parse `#[serde(skip_deserializing)]`
-                    Meta(Word(word)) if word == "skip_deserializing" => {
+                    Meta(Word(ref word)) if word == "skip_deserializing" => {
                         skip_deserializing.set_true();
                     }
 
                     // Parse `#[serde(skip)]`
-                    Meta(Word(word)) if word == "skip" => {
+                    Meta(Word(ref word)) if word == "skip" => {
                         skip_serializing.set_true();
                         skip_deserializing.set_true();
                     }
 
                     // Parse `#[serde(skip_serializing_if = "...")]`
                     Meta(NameValue(ref m)) if m.ident == "skip_serializing_if" => {
-                        if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) {
+                        if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, &m.lit) {
                             skip_serializing_if.set(path);
                         }
                     }
 
                     // Parse `#[serde(serialize_with = "...")]`
                     Meta(NameValue(ref m)) if m.ident == "serialize_with" => {
-                        if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) {
+                        if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, &m.lit) {
                             serialize_with.set(path);
                         }
                     }
 
                     // Parse `#[serde(deserialize_with = "...")]`
                     Meta(NameValue(ref m)) if m.ident == "deserialize_with" => {
-                        if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) {
+                        if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, &m.lit) {
                             deserialize_with.set(path);
                         }
                     }
 
                     // Parse `#[serde(with = "...")]`
                     Meta(NameValue(ref m)) if m.ident == "with" => {
-                        if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) {
+                        if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, &m.lit) {
                             let mut ser_path = path.clone();
                             ser_path
                                 .path
                                 .segments
                                 .push(Ident::new("serialize", Span::call_site()).into());
                             serialize_with.set(ser_path);
                             let mut de_path = path;
                             de_path
@@ -907,66 +919,64 @@ impl Field {
                                 .push(Ident::new("deserialize", Span::call_site()).into());
                             deserialize_with.set(de_path);
                         }
                     }
 
                     // Parse `#[serde(bound = "D: Serialize")]`
                     Meta(NameValue(ref m)) if m.ident == "bound" => {
                         if let Ok(where_predicates) =
-                            parse_lit_into_where(cx, m.ident.as_ref(), m.ident.as_ref(), &m.lit)
+                            parse_lit_into_where(cx, &m.ident, &m.ident, &m.lit)
                         {
                             ser_bound.set(where_predicates.clone());
                             de_bound.set(where_predicates);
                         }
                     }
 
                     // Parse `#[serde(bound(serialize = "D: Serialize", deserialize = "D: Deserialize"))]`
                     Meta(List(ref m)) if m.ident == "bound" => {
                         if let Ok((ser, de)) = get_where_predicates(cx, &m.nested) {
                             ser_bound.set_opt(ser);
                             de_bound.set_opt(de);
                         }
                     }
 
                     // Parse `#[serde(borrow)]`
-                    Meta(Word(word)) if word == "borrow" => {
+                    Meta(Word(ref word)) if word == "borrow" => {
                         if let Ok(borrowable) = borrowable_lifetimes(cx, &ident, &field.ty) {
                             borrowed_lifetimes.set(borrowable);
                         }
                     }
 
                     // Parse `#[serde(borrow = "'a + 'b")]`
                     Meta(NameValue(ref m)) if m.ident == "borrow" => {
-                        if let Ok(lifetimes) =
-                            parse_lit_into_lifetimes(cx, m.ident.as_ref(), &m.lit)
-                        {
+                        if let Ok(lifetimes) = parse_lit_into_lifetimes(cx, &m.ident, &m.lit) {
                             if let Ok(borrowable) = borrowable_lifetimes(cx, &ident, &field.ty) {
                                 for lifetime in &lifetimes {
                                     if !borrowable.contains(lifetime) {
                                         cx.error(format!(
                                             "field `{}` does not have lifetime {}",
                                             ident, lifetime
                                         ));
                                     }
                                 }
                                 borrowed_lifetimes.set(lifetimes);
                             }
                         }
                     }
 
                     // Parse `#[serde(getter = "...")]`
                     Meta(NameValue(ref m)) if m.ident == "getter" => {
-                        if let Ok(path) = parse_lit_into_expr_path(cx, m.ident.as_ref(), &m.lit) {
+                        if let Ok(path) = parse_lit_into_expr_path(cx, &m.ident, &m.lit) {
                             getter.set(path);
                         }
                     }
 
                     // Parse `#[serde(flatten)]`
-                    Meta(Word(word)) if word == "flatten" => {
+                    Meta(Word(ref word)) if word == "flatten" => {
                         flatten.set_true();
                     }
 
                     Meta(ref meta_item) => {
                         cx.error(format!(
                             "unknown serde field attribute `{}`",
                             meta_item.name()
                         ));
@@ -1061,16 +1071,17 @@ impl Field {
             default: default.get().unwrap_or(Default::None),
             serialize_with: serialize_with.get(),
             deserialize_with: deserialize_with.get(),
             ser_bound: ser_bound.get(),
             de_bound: de_bound.get(),
             borrowed_lifetimes: borrowed_lifetimes,
             getter: getter.get(),
             flatten: flatten.get(),
+            transparent: false,
         }
     }
 
     pub fn name(&self) -> &Name {
         &self.name
     }
 
     pub fn rename_by_rule(&mut self, rule: &RenameRule) {
@@ -1120,43 +1131,52 @@ impl Field {
 
     pub fn getter(&self) -> Option<&syn::ExprPath> {
         self.getter.as_ref()
     }
 
     pub fn flatten(&self) -> bool {
         self.flatten
     }
+
+    pub fn transparent(&self) -> bool {
+        self.transparent
+    }
+
+    pub fn mark_transparent(&mut self) {
+        self.transparent = true;
+    }
 }
 
 type SerAndDe<T> = (Option<T>, Option<T>);
 
 fn get_ser_and_de<'a, T, F>(
     cx: &Ctxt,
     attr_name: &'static str,
     metas: &'a Punctuated<syn::NestedMeta, Token![,]>,
     f: F,
 ) -> Result<SerAndDe<T>, ()>
 where
     T: 'a,
-    F: Fn(&Ctxt, &str, &str, &'a syn::Lit) -> Result<T, ()>,
+    F: Fn(&Ctxt, &Ident, &Ident, &'a syn::Lit) -> Result<T, ()>,
 {
     let mut ser_meta = Attr::none(cx, attr_name);
     let mut de_meta = Attr::none(cx, attr_name);
+    let attr_name = Ident::new(attr_name, Span::call_site());
 
     for meta in metas {
         match *meta {
             Meta(NameValue(ref meta)) if meta.ident == "serialize" => {
-                if let Ok(v) = f(cx, attr_name, meta.ident.as_ref(), &meta.lit) {
+                if let Ok(v) = f(cx, &attr_name, &meta.ident, &meta.lit) {
                     ser_meta.set(v);
                 }
             }
 
             Meta(NameValue(ref meta)) if meta.ident == "deserialize" => {
-                if let Ok(v) = f(cx, attr_name, meta.ident.as_ref(), &meta.lit) {
+                if let Ok(v) = f(cx, &attr_name, &meta.ident, &meta.lit) {
                     de_meta.set(v);
                 }
             }
 
             _ => {
                 cx.error(format!(
                     "malformed {0} attribute, expected `{0}(serialize = ..., \
                      deserialize = ...)`",
@@ -1195,82 +1215,82 @@ pub fn get_serde_meta_items(attr: &syn::
         }
     } else {
         None
     }
 }
 
 fn get_lit_str<'a>(
     cx: &Ctxt,
-    attr_name: &str,
-    meta_item_name: &str,
+    attr_name: &Ident,
+    meta_item_name: &Ident,
     lit: &'a syn::Lit,
 ) -> Result<&'a syn::LitStr, ()> {
     if let syn::Lit::Str(ref lit) = *lit {
         Ok(lit)
     } else {
         cx.error(format!(
             "expected serde {} attribute to be a string: `{} = \"...\"`",
             attr_name, meta_item_name
         ));
         Err(())
     }
 }
 
-fn parse_lit_into_path(cx: &Ctxt, attr_name: &str, lit: &syn::Lit) -> Result<syn::Path, ()> {
+fn parse_lit_into_path(cx: &Ctxt, attr_name: &Ident, lit: &syn::Lit) -> Result<syn::Path, ()> {
     let string = try!(get_lit_str(cx, attr_name, attr_name, lit));
     parse_lit_str(string)
         .map_err(|_| cx.error(format!("failed to parse path: {:?}", string.value())))
 }
 
 fn parse_lit_into_expr_path(
     cx: &Ctxt,
-    attr_name: &str,
+    attr_name: &Ident,
     lit: &syn::Lit,
 ) -> Result<syn::ExprPath, ()> {
     let string = try!(get_lit_str(cx, attr_name, attr_name, lit));
     parse_lit_str(string)
         .map_err(|_| cx.error(format!("failed to parse path: {:?}", string.value())))
 }
 
 fn parse_lit_into_where(
     cx: &Ctxt,
-    attr_name: &str,
-    meta_item_name: &str,
+    attr_name: &Ident,
+    meta_item_name: &Ident,
     lit: &syn::Lit,
 ) -> Result<Vec<syn::WherePredicate>, ()> {
     let string = try!(get_lit_str(cx, attr_name, meta_item_name, lit));
     if string.value().is_empty() {
         return Ok(Vec::new());
     }
 
     let where_string = syn::LitStr::new(&format!("where {}", string.value()), string.span());
 
     parse_lit_str::<syn::WhereClause>(&where_string)
         .map(|wh| wh.predicates.into_iter().collect())
         .map_err(|err| cx.error(err))
 }
 
-fn parse_lit_into_ty(cx: &Ctxt, attr_name: &str, lit: &syn::Lit) -> Result<syn::Type, ()> {
+fn parse_lit_into_ty(cx: &Ctxt, attr_name: &Ident, lit: &syn::Lit) -> Result<syn::Type, ()> {
     let string = try!(get_lit_str(cx, attr_name, attr_name, lit));
 
     parse_lit_str(string).map_err(|_| {
         cx.error(format!(
             "failed to parse type: {} = {:?}",
             attr_name,
             string.value()
         ))
     })
 }
 
 // Parses a string literal like "'a + 'b + 'c" containing a nonempty list of
 // lifetimes separated by `+`.
 fn parse_lit_into_lifetimes(
     cx: &Ctxt,
-    attr_name: &str,
+    attr_name: &Ident,
     lit: &syn::Lit,
 ) -> Result<BTreeSet<syn::Lifetime>, ()> {
     let string = try!(get_lit_str(cx, attr_name, attr_name, lit));
     if string.value().is_empty() {
         cx.error("at least one lifetime must be borrowed");
         return Err(());
     }
 
@@ -1281,17 +1301,17 @@ fn parse_lit_into_lifetimes(
             call!(Punctuated::parse_separated_nonempty),
             BorrowedLifetimes
         ));
     }
 
     if let Ok(BorrowedLifetimes(lifetimes)) = parse_lit_str(string) {
         let mut set = BTreeSet::new();
         for lifetime in lifetimes {
-            if !set.insert(lifetime) {
+            if !set.insert(lifetime.clone()) {
                 cx.error(format!("duplicate borrowed lifetime `{}`", lifetime));
             }
         }
         return Ok(set);
     }
 
     cx.error(format!(
         "failed to parse borrowed lifetimes: {:?}",
@@ -1421,17 +1441,19 @@ fn is_slice_u8(ty: &syn::Type) -> bool {
 fn is_primitive_type(ty: &syn::Type, primitive: &str) -> bool {
     match *ty {
         syn::Type::Path(ref ty) => ty.qself.is_none() && is_primitive_path(&ty.path, primitive),
         _ => false,
     }
 }
 
 fn is_primitive_path(path: &syn::Path, primitive: &str) -> bool {
-    path.leading_colon.is_none() && path.segments.len() == 1 && path.segments[0].ident == primitive
+    path.leading_colon.is_none()
+        && path.segments.len() == 1
+        && path.segments[0].ident == primitive
         && path.segments[0].arguments.is_empty()
 }
 
 // All lifetimes that this type could borrow from a Deserializer.
 //
 // For example a type `S<'a, 'b>` could borrow `'a` and `'b`. On the other hand
 // a type `for<'a> fn(&'a str)` could not borrow `'a` from the Deserializer.
 //
--- a/third_party/rust/serde_derive/src/internals/check.rs
+++ b/third_party/rust/serde_derive/src/internals/check.rs
@@ -3,27 +3,29 @@
 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
 use internals::ast::{Container, Data, Field, Style};
 use internals::attr::{EnumTag, Identifier};
-use internals::Ctxt;
+use internals::{Ctxt, Derive};
+use syn::{Member, Type};
 
 /// Cross-cutting checks that require looking at more than a single attrs
 /// object. Simpler checks should happen when parsing and building the attrs.
-pub fn check(cx: &Ctxt, cont: &Container) {
+pub fn check(cx: &Ctxt, cont: &mut Container, derive: Derive) {
     check_getter(cx, cont);
     check_flatten(cx, cont);
     check_identifier(cx, cont);
     check_variant_skip_attrs(cx, cont);
     check_internal_tag_field_name_conflict(cx, cont);
     check_adjacent_tag_conflict(cx, cont);
+    check_transparent(cx, cont, derive);
 }
 
 /// Getters are only allowed inside structs (not enums) with the `remote`
 /// attribute.
 fn check_getter(cx: &Ctxt, cont: &Container) {
     match cont.data {
         Data::Enum(_, _) => {
             if cont.data.has_getter() {
@@ -166,60 +168,54 @@ fn check_variant_skip_attrs(cx: &Ctxt, c
             if variant.attrs.skip_serializing() {
                 cx.error(format!(
                     "variant `{}` cannot have both #[serde(serialize_with)] and \
                      #[serde(skip_serializing)]",
                     variant.ident
                 ));
             }
 
-            for (i, field) in variant.fields.iter().enumerate() {
-                let ident = field
-                    .ident
-                    .as_ref()
-                    .map_or_else(|| format!("{}", i), |ident| format!("`{}`", ident));
+            for field in &variant.fields {
+                let member = member_message(&field.member);
 
                 if field.attrs.skip_serializing() {
                     cx.error(format!(
                         "variant `{}` cannot have both #[serde(serialize_with)] and \
                          a field {} marked with #[serde(skip_serializing)]",
-                        variant.ident, ident
+                        variant.ident, member
                     ));
                 }
 
                 if field.attrs.skip_serializing_if().is_some() {
                     cx.error(format!(
                         "variant `{}` cannot have both #[serde(serialize_with)] and \
                          a field {} marked with #[serde(skip_serializing_if)]",
-                        variant.ident, ident
+                        variant.ident, member
                     ));
                 }
             }
         }
 
         if variant.attrs.deserialize_with().is_some() {
             if variant.attrs.skip_deserializing() {
                 cx.error(format!(
                     "variant `{}` cannot have both #[serde(deserialize_with)] and \
                      #[serde(skip_deserializing)]",
                     variant.ident
                 ));
             }
 
-            for (i, field) in variant.fields.iter().enumerate() {
+            for field in &variant.fields {
                 if field.attrs.skip_deserializing() {
-                    let ident = field
-                        .ident
-                        .as_ref()
-                        .map_or_else(|| format!("{}", i), |ident| format!("`{}`", ident));
+                    let member = member_message(&field.member);
 
                     cx.error(format!(
                         "variant `{}` cannot have both #[serde(deserialize_with)] \
                          and a field {} marked with #[serde(skip_deserializing)]",
-                        variant.ident, ident
+                        variant.ident, member
                     ));
                 }
             }
         }
     }
 }
 
 /// The tag of an internally-tagged struct variant must not be
@@ -277,8 +273,83 @@ fn check_adjacent_tag_conflict(cx: &Ctxt
     if type_tag == content_tag {
         let message = format!(
             "enum tags `{}` for type and content conflict with each other",
             type_tag
         );
         cx.error(message);
     }
 }
+
+/// Enums and unit structs cannot be transparent.
+fn check_transparent(cx: &Ctxt, cont: &mut Container, derive: Derive) {
+    if !cont.attrs.transparent() {
+        return;
+    }
+
+    if cont.attrs.type_from().is_some() {
+        cx.error("#[serde(transparent)] is not allowed with #[serde(from = \"...\")]");
+    }
+
+    if cont.attrs.type_into().is_some() {
+        cx.error("#[serde(transparent)] is not allowed with #[serde(into = \"...\")]");
+    }
+
+    let fields = match cont.data {
+        Data::Enum(_, _) => {
+            cx.error("#[serde(transparent)] is not allowed on an enum");
+            return;
+        }
+        Data::Struct(Style::Unit, _) => {
+            cx.error("#[serde(transparent)] is not allowed on a unit struct");
+            return;
+        }
+        Data::Struct(_, ref mut fields) => fields,
+    };
+
+    let mut transparent_field = None;
+
+    for field in fields {
+        if allow_transparent(field, derive) {
+            if transparent_field.is_some() {
+                cx.error(
+                    "#[serde(transparent)] requires struct to have at most one transparent field",
+                );
+                return;
+            }
+            transparent_field = Some(field);
+        }
+    }
+
+    match transparent_field {
+        Some(transparent_field) => transparent_field.attrs.mark_transparent(),
+        None => match derive {
+            Derive::Serialize => {
+                cx.error("#[serde(transparent)] requires at least one field that is not skipped");
+            }
+            Derive::Deserialize => {
+                cx.error("#[serde(transparent)] requires at least one field that is neither skipped nor has a default");
+            }
+        },
+    }
+}
+
+fn member_message(member: &Member) -> String {
+    match *member {
+        Member::Named(ref ident) => format!("`{}`", ident),
+        Member::Unnamed(ref i) => i.index.to_string(),
+    }
+}
+
+fn allow_transparent(field: &Field, derive: Derive) -> bool {
+    if let Type::Path(ref ty) = *field.ty {
+        if let Some(seg) = ty.path.segments.last() {
+            if seg.into_value().ident == "PhantomData" {
+                return false;
+            }
+        }
+    }
+
+    match derive {
+        Derive::Serialize => !field.attrs.skip_serializing(),
+        Derive::Deserialize => !field.attrs.skip_deserializing() && field.attrs.default().is_none(),
+    }
+}
--- a/third_party/rust/serde_derive/src/internals/mod.rs
+++ b/third_party/rust/serde_derive/src/internals/mod.rs
@@ -9,8 +9,14 @@
 pub mod ast;
 pub mod attr;
 
 mod ctxt;
 pub use self::ctxt::Ctxt;
 
 mod case;
 mod check;
+
+#[derive(Copy, Clone)]
+pub enum Derive {
+    Serialize,
+    Deserialize,
+}
--- a/third_party/rust/serde_derive/src/lib.rs
+++ b/third_party/rust/serde_derive/src/lib.rs
@@ -17,24 +17,24 @@
 //! #
 //! # fn main() {}
 //! ```
 //!
 //! Please refer to [https://serde.rs/derive.html] for how to set this up.
 //!
 //! [https://serde.rs/derive.html]: https://serde.rs/derive.html
 
-#![doc(html_root_url = "https://docs.rs/serde_derive/1.0.58")]
+#![doc(html_root_url = "https://docs.rs/serde_derive/1.0.66")]
 #![cfg_attr(feature = "cargo-clippy", deny(clippy, clippy_pedantic))]
 // Whitelisted clippy lints
 #![cfg_attr(
     feature = "cargo-clippy",
     allow(
         enum_variant_names, redundant_field_names, too_many_arguments, used_underscore_binding,
-        cyclomatic_complexity
+        cyclomatic_complexity, needless_pass_by_value
     )
 )]
 // Whitelisted clippy_pedantic lints
 #![cfg_attr(
     feature = "cargo-clippy",
     allow(
         items_after_statements, doc_markdown, stutter, similar_names, use_self, single_match_else,
         enum_glob_use, match_same_arms, filter_map, cast_possible_truncation
@@ -64,22 +64,26 @@ mod fragment;
 mod de;
 mod pretend;
 mod ser;
 mod try;
 
 #[proc_macro_derive(Serialize, attributes(serde))]
 pub fn derive_serialize(input: TokenStream) -> TokenStream {
     let input: DeriveInput = syn::parse(input).unwrap();
-    match ser::expand_derive_serialize(&input) {
-        Ok(expanded) => expanded.into(),
-        Err(msg) => panic!(msg),
-    }
+    ser::expand_derive_serialize(&input)
+        .unwrap_or_else(compile_error)
+        .into()
 }
 
 #[proc_macro_derive(Deserialize, attributes(serde))]
 pub fn derive_deserialize(input: TokenStream) -> TokenStream {
     let input: DeriveInput = syn::parse(input).unwrap();
-    match de::expand_derive_deserialize(&input) {
-        Ok(expanded) => expanded.into(),
-        Err(msg) => panic!(msg),
+    de::expand_derive_deserialize(&input)
+        .unwrap_or_else(compile_error)
+        .into()
+}
+
+fn compile_error(message: String) -> proc_macro2::TokenStream {
+    quote! {
+        compile_error!(#message);
     }
 }
--- a/third_party/rust/serde_derive/src/pretend.rs
+++ b/third_party/rust/serde_derive/src/pretend.rs
@@ -1,10 +1,9 @@
-use proc_macro2::Span;
-use quote::Tokens;
+use proc_macro2::{Span, TokenStream};
 use syn::Ident;
 
 use internals::ast::{Container, Data, Field, Style};
 
 // Suppress dead_code warnings that would otherwise appear when using a remote
 // derive. Other than this pretend code, a struct annotated with remote derive
 // never has its fields referenced and an enum annotated with remote derive
 // never has its variants constructed.
@@ -16,17 +15,17 @@ use internals::ast::{Container, Data, Fi
 //       |                    ^^^^^^
 //
 //     warning: variant is never constructed: `V`
 //      --> src/main.rs:8:16
 //       |
 //     8 | enum EnumDef { V }
 //       |                ^
 //
-pub fn pretend_used(cont: &Container) -> Tokens {
+pub fn pretend_used(cont: &Container) -> TokenStream {
     let pretend_fields = pretend_fields_used(cont);
     let pretend_variants = pretend_variants_used(cont);
 
     quote! {
         #pretend_fields
         #pretend_variants
     }
 }
@@ -44,26 +43,26 @@ pub fn pretend_used(cont: &Container) ->
 //         Some(T::A { a: ref __v0 }) => {}
 //         Some(T::B { b: ref __v0 }) => {}
 //         _ => {}
 //     }
 //
 // The `ref` is important in case the user has written a Drop impl on their
 // type. Rust does not allow destructuring a struct or enum that has a Drop
 // impl.
-fn pretend_fields_used(cont: &Container) -> Tokens {
-    let type_ident = cont.ident;
+fn pretend_fields_used(cont: &Container) -> TokenStream {
+    let type_ident = &cont.ident;
     let (_, ty_generics, _) = cont.generics.split_for_impl();
 
     let patterns = match cont.data {
         Data::Enum(_, ref variants) => variants
             .iter()
             .filter_map(|variant| match variant.style {
                 Style::Struct => {
-                    let variant_ident = variant.ident;
+                    let variant_ident = &variant.ident;
                     let pat = struct_pattern(&variant.fields);
                     Some(quote!(#type_ident::#variant_ident #pat))
                 }
                 _ => None,
             })
             .collect::<Vec<_>>(),
         Data::Struct(Style::Struct, ref fields) => {
             let pat = struct_pattern(fields);
@@ -88,38 +87,38 @@ fn pretend_fields_used(cont: &Container)
 //
 //     match None {
 //         Some((__v0, __v1,)) => {
 //             let _ = E::V { a: __v0, b: __v1 };
 //         }
 //         _ => {}
 //     }
 //
-fn pretend_variants_used(cont: &Container) -> Tokens {
+fn pretend_variants_used(cont: &Container) -> TokenStream {
     let variants = match cont.data {
         Data::Enum(_, ref variants) => variants,
         Data::Struct(_, _) => {
             return quote!();
         }
     };
 
-    let type_ident = cont.ident;
+    let type_ident = &cont.ident;
     let (_, ty_generics, _) = cont.generics.split_for_impl();
     let turbofish = ty_generics.as_turbofish();
 
     let cases = variants.iter().map(|variant| {
-        let variant_ident = variant.ident;
+        let variant_ident = &variant.ident;
         let placeholders = &(0..variant.fields.len())
             .map(|i| Ident::new(&format!("__v{}", i), Span::call_site()))
             .collect::<Vec<_>>();
 
         let pat = match variant.style {
             Style::Struct => {
-                let names = variant.fields.iter().map(|field| field.ident);
-                quote!({ #(#names: #placeholders),* })
+                let members = variant.fields.iter().map(|field| &field.member);
+                quote!({ #(#members: #placeholders),* })
             }
             Style::Tuple | Style::Newtype => quote!(( #(#placeholders),* )),
             Style::Unit => quote!(),
         };
 
         quote! {
             match _serde::export::None {
                 _serde::export::Some((#(#placeholders,)*)) => {
@@ -128,14 +127,14 @@ fn pretend_variants_used(cont: &Containe
                 _ => {}
             }
         }
     });
 
     quote!(#(#cases)*)
 }
 
-fn struct_pattern(fields: &[Field]) -> Tokens {
-    let names = fields.iter().map(|field| field.ident);
+fn struct_pattern(fields: &[Field]) -> TokenStream {
+    let members = fields.iter().map(|field| &field.member);
     let placeholders =
         (0..fields.len()).map(|i| Ident::new(&format!("__v{}", i), Span::call_site()));
-    quote!({ #(#names: ref #placeholders),* })
+    quote!({ #(#members: ref #placeholders),* })
 }
--- a/third_party/rust/serde_derive/src/ser.rs
+++ b/third_party/rust/serde_derive/src/ser.rs
@@ -1,35 +1,34 @@
 // Copyright 2017 Serde Developers
 //
 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use proc_macro2::Span;
-use quote::Tokens;
+use proc_macro2::{Span, TokenStream};
 use syn::spanned::Spanned;
 use syn::{self, Ident, Index, Member};
 
 use bound;
 use fragment::{Fragment, Match, Stmts};
 use internals::ast::{Container, Data, Field, Style, Variant};
-use internals::{attr, Ctxt};
+use internals::{attr, Ctxt, Derive};
 use pretend;
 use try;
 
-pub fn expand_derive_serialize(input: &syn::DeriveInput) -> Result<Tokens, String> {
+pub fn expand_derive_serialize(input: &syn::DeriveInput) -> Result<TokenStream, String> {
     let ctxt = Ctxt::new();
-    let cont = Container::from_ast(&ctxt, input);
+    let cont = Container::from_ast(&ctxt, input, Derive::Serialize);
     precondition(&ctxt, &cont);
     try!(ctxt.check());
 
-    let ident = cont.ident;
+    let ident = &cont.ident;
     let params = Parameters::new(&cont);
     let (impl_generics, ty_generics, where_clause) = params.generics.split_for_impl();
     let dummy_const = Ident::new(&format!("_IMPL_SERIALIZE_FOR_{}", ident), Span::call_site());
     let body = Stmts(serialize_body(&cont, &params));
 
     let impl_block = if let Some(remote) = cont.attrs.remote() {
         let vis = &input.vis;
         let used = pretend::pretend_used(&cont);
@@ -105,33 +104,33 @@ impl Parameters {
         let self_var = if is_remote {
             Ident::new("__self", Span::call_site())
         } else {
             Ident::new("self", Span::call_site())
         };
 
         let this = match cont.attrs.remote() {
             Some(remote) => remote.clone(),
-            None => cont.ident.into(),
+            None => cont.ident.clone().into(),
         };
 
         let generics = build_generics(cont);
 
         Parameters {
             self_var: self_var,
             this: this,
             generics: generics,
             is_remote: is_remote,
         }
     }
 
     /// Type name to use in error messages and `&'static str` arguments to
     /// various Serializer methods.
-    fn type_name(&self) -> &str {
-        self.this.segments.last().unwrap().value().ident.as_ref()
+    fn type_name(&self) -> String {
+        self.this.segments.last().unwrap().value().ident.to_string()
     }
 }
 
 // All the generics in the input, plus a bound `T: Serialize` for each generic
 // field type that will be serialized by us.
 fn build_generics(cont: &Container) -> syn::Generics {
     let generics = bound::without_defaults(cont.generics);
 
@@ -155,49 +154,66 @@ fn build_generics(cont: &Container) -> s
 // Fields with a `skip_serializing` or `serialize_with` attribute, or which
 // belong to a variant with a 'skip_serializing` or `serialize_with` attribute,
 // are not serialized by us so we do not generate a bound. Fields with a `bound`
 // attribute specify their own bound so we do not generate one. All other fields
 // may need a `T: Serialize` bound where T is the type of the field.
 fn needs_serialize_bound(field: &attr::Field, variant: Option<&attr::Variant>) -> bool {
     !field.skip_serializing() && field.serialize_with().is_none() && field.ser_bound().is_none()
         && variant.map_or(true, |variant| {
-            !variant.skip_serializing() && variant.serialize_with().is_none()
+            !variant.skip_serializing()
+                && variant.serialize_with().is_none()
                 && variant.ser_bound().is_none()
         })
 }
 
 fn serialize_body(cont: &Container, params: &Parameters) -> Fragment {
-    if let Some(type_into) = cont.attrs.type_into() {
+    if cont.attrs.transparent() {
+        serialize_transparent(cont, params)
+    } else if let Some(type_into) = cont.attrs.type_into() {
         serialize_into(params, type_into)
     } else {
         match cont.data {
             Data::Enum(_, ref variants) => serialize_enum(params, variants, &cont.attrs),
             Data::Struct(Style::Struct, ref fields) => {
-                if fields.iter().any(|field| field.ident.is_none()) {
-                    panic!("struct has unnamed fields");
-                }
                 serialize_struct(params, fields, &cont.attrs)
             }
             Data::Struct(Style::Tuple, ref fields) => {
-                if fields.iter().any(|field| field.ident.is_some()) {
-                    panic!("tuple struct has named fields");
-                }
                 serialize_tuple_struct(params, fields, &cont.attrs)
             }
             Data::Struct(Style::Newtype, ref fields) => {
                 serialize_newtype_struct(params, &fields[0], &cont.attrs)
             }
             Data::Struct(Style::Unit, _) => serialize_unit_struct(&cont.attrs),
         }
     }
 }
 
+fn serialize_transparent(cont: &Container, params: &Parameters) -> Fragment {
+    let fields = match cont.data {
+        Data::Struct(_, ref fields) => fields,
+        Data::Enum(_, _) => unreachable!(),
+    };
+
+    let self_var = &params.self_var;
+    let transparent_field = fields.iter().find(|f| f.attrs.transparent()).unwrap();
+    let member = &transparent_field.member;
+
+    let path = match transparent_field.attrs.serialize_with() {
+        Some(path) => quote!(#path),
+        None => quote!(_serde::Serialize::serialize),
+    };
+
+    quote_block! {
+        #path(&#self_var.#member, __serializer)
+    }
+}
+
 fn serialize_into(params: &Parameters, type_into: &syn::Type) -> Fragment {
-    let self_var = params.self_var;
+    let self_var = &params.self_var;
     quote_block! {
         _serde::Serialize::serialize(
             &_serde::export::Into::<#type_into>::into(_serde::export::Clone::clone(#self_var)),
             __serializer)
     }
 }
 
 fn serialize_unit_struct(cattrs: &attr::Container) -> Fragment {
@@ -299,18 +315,17 @@ fn serialize_struct_as_struct(
         .peekable();
 
     let let_mut = mut_if(serialized_fields.peek().is_some());
 
     let len = serialized_fields
         .map(|field| match field.attrs.skip_serializing_if() {
             None => quote!(1),
             Some(path) => {
-                let ident = field.ident.expect("struct has unnamed fields");
-                let field_expr = get_member(params, field, &Member::Named(ident));
+                let field_expr = get_member(params, field, &field.member);
                 quote!(if #path(#field_expr) { 0 } else { 1 })
             }
         })
         .fold(quote!(0), |sum, expr| quote!(#sum + #expr));
 
     quote_block! {
         let #let_mut __serde_state = try!(_serde::Serializer::serialize_struct(__serializer, #type_name, #len));
         #(#serialize_fields)*
@@ -335,18 +350,17 @@ fn serialize_struct_as_map(
 
     let len = if cattrs.has_flatten() {
         quote!(_serde::export::None)
     } else {
         let len = serialized_fields
             .map(|field| match field.attrs.skip_serializing_if() {
                 None => quote!(1),
                 Some(path) => {
-                    let ident = field.ident.expect("struct has unnamed fields");
-                    let field_expr = get_member(params, field, &Member::Named(ident));
+                    let field_expr = get_member(params, field, &field.member);
                     quote!(if #path(#field_expr) { 0 } else { 1 })
                 }
             })
             .fold(quote!(0), |sum, expr| quote!(#sum + #expr));
         quote!(_serde::export::Some(#len))
     };
 
     quote_block! {
@@ -354,17 +368,17 @@ fn serialize_struct_as_map(
         #(#serialize_fields)*
         _serde::ser::SerializeMap::end(__serde_state)
     }
 }
 
 fn serialize_enum(params: &Parameters, variants: &[Variant], cattrs: &attr::Container) -> Fragment {
     assert!(variants.len() as u64 <= u64::from(u32::max_value()));
 
-    let self_var = params.self_var;
+    let self_var = &params.self_var;
 
     let arms: Vec<_> = variants
         .iter()
         .enumerate()
         .map(|(variant_index, variant)| {
             serialize_variant(params, variant, variant_index as u32, cattrs)
         })
         .collect();
@@ -376,19 +390,19 @@ fn serialize_enum(params: &Parameters, v
     }
 }
 
 fn serialize_variant(
     params: &Parameters,
     variant: &Variant,
     variant_index: u32,
     cattrs: &attr::Container,
-) -> Tokens {
+) -> TokenStream {
     let this = &params.this;
-    let variant_ident = variant.ident;
+    let variant_ident = &variant.ident;
 
     if variant.attrs.skip_serializing() {
         let skipped_msg = format!(
             "the enum variant {}::{} cannot be serialized",
             params.type_name(),
             variant_ident
         );
         let skipped_err = quote! {
@@ -418,22 +432,19 @@ fn serialize_variant(
             Style::Tuple => {
                 let field_names = (0..variant.fields.len())
                     .map(|i| Ident::new(&format!("__field{}", i), Span::call_site()));
                 quote! {
                     #this::#variant_ident(#(ref #field_names),*)
                 }
             }
             Style::Struct => {
-                let fields = variant
-                    .fields
-                    .iter()
-                    .map(|f| f.ident.expect("struct variant has unnamed fields"));
+                let members = variant.fields.iter().map(|f| &f.member);
                 quote! {
-                    #this::#variant_ident { #(ref #fields),* }
+                    #this::#variant_ident { #(ref #members),* }
                 }
             }
         };
 
         let body = Match(match *cattrs.tag() {
             attr::EnumTag::External => {
                 serialize_externally_tagged_variant(params, variant, variant_index, cattrs)
             }
@@ -529,17 +540,17 @@ fn serialize_internally_tagged_variant(
     variant: &Variant,
     cattrs: &attr::Container,
     tag: &str,
 ) -> Fragment {
     let type_name = cattrs.name().serialize_name();
     let variant_name = variant.attrs.name().serialize_name();
 
     let enum_ident_str = params.type_name();
-    let variant_ident_str = variant.ident.as_ref();
+    let variant_ident_str = variant.ident.to_string();
 
     if let Some(path) = variant.attrs.serialize_with() {
         let ser = wrap_serialize_variant_with(params, path, variant);
         return quote_expr! {
             _serde::private::ser::serialize_tagged_newtype(
                 __serializer,
                 #enum_ident_str,
                 #variant_ident_str,
@@ -651,25 +662,21 @@ fn serialize_adjacently_tagged_variant(
     let fields_ident: &Vec<_> = &match variant.style {
         Style::Unit => {
             if variant.attrs.serialize_with().is_some() {
                 vec![]
             } else {
                 unreachable!()
             }
         }
-        Style::Newtype => vec![Ident::new("__field0", Span::call_site())],
+        Style::Newtype => vec![Member::Named(Ident::new("__field0", Span::call_site()))],
         Style::Tuple => (0..variant.fields.len())
-            .map(|i| Ident::new(&format!("__field{}", i), Span::call_site()))
+            .map(|i| Member::Named(Ident::new(&format!("__field{}", i), Span::call_site())))
             .collect(),
-        Style::Struct => variant
-            .fields
-            .iter()
-            .map(|f| f.ident.expect("struct variant has unnamed fields"))
-            .collect(),
+        Style::Struct => variant.fields.iter().map(|f| f.member.clone()).collect(),
     };
 
     let (_, ty_generics, where_clause) = params.generics.split_for_impl();
 
     let wrapper_generics = if fields_ident.is_empty() {
         params.generics.clone()
     } else {
         bound::with_lifetime_bound(&params.generics, "'__a")
@@ -845,20 +852,20 @@ fn serialize_struct_variant<'a>(
         .iter()
         .filter(|&field| !field.attrs.skip_serializing())
         .peekable();
 
     let let_mut = mut_if(serialized_fields.peek().is_some());
 
     let len = serialized_fields
         .map(|field| {
-            let ident = field.ident.expect("struct has unnamed fields");
+            let member = &field.member;
 
             match field.attrs.skip_serializing_if() {
-                Some(path) => quote!(if #path(#ident) { 0 } else { 1 }),
+                Some(path) => quote!(if #path(#member) { 0 } else { 1 }),
                 None => quote!(1),
             }
         })
         .fold(quote!(0), |sum, expr| quote!(#sum + #expr));
 
     match context {
         StructVariant::ExternallyTagged {
             variant_index,
@@ -924,49 +931,49 @@ fn serialize_struct_variant_with_flatten
 
     match context {
         StructVariant::ExternallyTagged {
             variant_index,
             variant_name,
         } => {
             let this = &params.this;
             let fields_ty = fields.iter().map(|f| &f.ty);
-            let fields_ident = &fields.iter().map(|f| f.ident).collect::<Vec<_>>();
+            let members = &fields.iter().map(|f| &f.member).collect::<Vec<_>>();
 
             let (_, ty_generics, where_clause) = params.generics.split_for_impl();
             let wrapper_generics = bound::with_lifetime_bound(&params.generics, "'__a");
             let (wrapper_impl_generics, wrapper_ty_generics, _) = wrapper_generics.split_for_impl();
 
             quote_block! {
                 struct __EnumFlatten #wrapper_generics #where_clause {
                     data: (#(&'__a #fields_ty,)*),
                     phantom: _serde::export::PhantomData<#this #ty_generics>,
                 }
 
                 impl #wrapper_impl_generics _serde::Serialize for __EnumFlatten #wrapper_ty_generics #where_clause {
                     fn serialize<__S>(&self, __serializer: __S) -> _serde::export::Result<__S::Ok, __S::Error>
                     where
                         __S: _serde::Serializer,
                     {
-                        let (#(#fields_ident,)*) = self.data;
+                        let (#(#members,)*) = self.data;
                         let #let_mut __serde_state = try!(_serde::Serializer::serialize_map(
                             __serializer,
                             _serde::export::None));
                         #(#serialize_fields)*
                         _serde::ser::SerializeMap::end(__serde_state)
                     }
                 }
 
                 _serde::Serializer::serialize_newtype_variant(
                     __serializer,
                     #name,
                     #variant_index,
                     #variant_name,
                     &__EnumFlatten {
-                        data: (#(#fields_ident,)*),
+                        data: (#(#members,)*),
                         phantom: _serde::export::PhantomData::<#this #ty_generics>,
                     })
             }
         }
         StructVariant::InternallyTagged { tag, variant_name } => {
             quote_block! {
                 let #let_mut __serde_state = try!(_serde::Serializer::serialize_map(
                     __serializer,
@@ -992,17 +999,17 @@ fn serialize_struct_variant_with_flatten
     }
 }
 
 fn serialize_tuple_struct_visitor(
     fields: &[Field],
     params: &Parameters,
     is_enum: bool,
     tuple_trait: &TupleTrait,
-) -> Vec<Tokens> {
+) -> Vec<TokenStream> {
     fields
         .iter()
         .enumerate()
         .filter(|&(_, ref field)| !field.attrs.skip_serializing())
         .map(|(i, field)| {
             let mut field_expr = if is_enum {
                 let id = Ident::new(&format!("__field{}", i), Span::call_site());
                 quote!(#id)
@@ -1040,27 +1047,27 @@ fn serialize_tuple_struct_visitor(
         .collect()
 }
 
 fn serialize_struct_visitor(
     fields: &[Field],
     params: &Parameters,
     is_enum: bool,
     struct_trait: &StructTrait,
-) -> Vec<Tokens> {
+) -> Vec<TokenStream> {
     fields
         .iter()
         .filter(|&field| !field.attrs.skip_serializing())
         .map(|field| {
-            let field_ident = field.ident.expect("struct has unnamed field");
+            let member = &field.member;
 
             let mut field_expr = if is_enum {
-                quote!(#field_ident)
+                quote!(#member)
             } else {
-                get_member(params, field, &Member::Named(field_ident))
+                get_member(params, field, &member)
             };
 
             let key_expr = field.attrs.name().serialize_name();
 
             let skip = field
                 .attrs
                 .skip_serializing_if()
                 .map(|path| quote!(#path(#field_expr)));
@@ -1104,52 +1111,54 @@ fn serialize_struct_visitor(
         })
         .collect()
 }
 
 fn wrap_serialize_field_with(
     params: &Parameters,
     field_ty: &syn::Type,
     serialize_with: &syn::ExprPath,
-    field_expr: &Tokens,
-) -> Tokens {
+    field_expr: &TokenStream,
+) -> TokenStream {
     wrap_serialize_with(params, serialize_with, &[field_ty], &[quote!(#field_expr)])
 }
 
 fn wrap_serialize_variant_with(
     params: &Parameters,
     serialize_with: &syn::ExprPath,
     variant: &Variant,
-) -> Tokens {
+) -> TokenStream {
     let field_tys: Vec<_> = variant.fields.iter().map(|field| field.ty).collect();
     let field_exprs: Vec<_> = variant
         .fields
         .iter()
-        .enumerate()
-        .map(|(i, field)| {
-            let id = field
-                .ident
-                .unwrap_or_else(|| Ident::new(&format!("__field{}", i), Span::call_site()));
+        .map(|field| {
+            let id = match field.member {
+                Member::Named(ref ident) => ident.clone(),
+                Member::Unnamed(ref member) => {
+                    Ident::new(&format!("__field{}", member.index), Span::call_site())
+                }
+            };
             quote!(#id)
         })
         .collect();
     wrap_serialize_with(
         params,
         serialize_with,
         field_tys.as_slice(),
         field_exprs.as_slice(),
     )
 }
 
 fn wrap_serialize_with(
     params: &Parameters,
     serialize_with: &syn::ExprPath,
     field_tys: &[&syn::Type],
-    field_exprs: &[Tokens],
-) -> Tokens {
+    field_exprs: &[TokenStream],
+) -> TokenStream {
     let this = &params.this;
     let (_, ty_generics, where_clause) = params.generics.split_for_impl();
 
     let wrapper_generics = if field_exprs.is_empty() {
         params.generics.clone()
     } else {
         bound::with_lifetime_bound(&params.generics, "'__a")
     };
@@ -1185,26 +1194,26 @@ fn wrap_serialize_with(
 }
 
 // Serialization of an empty struct results in code like:
 //
 //     let mut __serde_state = try!(serializer.serialize_struct("S", 0));
 //     _serde::ser::SerializeStruct::end(__serde_state)
 //
 // where we want to omit the `mut` to avoid a warning.
-fn mut_if(is_mut: bool) -> Option<Tokens> {
+fn mut_if(is_mut: bool) -> Option<TokenStream> {
     if is_mut {
         Some(quote!(mut))
     } else {
         None
     }
 }
 
-fn get_member(params: &Parameters, field: &Field, member: &Member) -> Tokens {
-    let self_var = params.self_var;
+fn get_member(params: &Parameters, field: &Field, member: &Member) -> TokenStream {
+    let self_var = &params.self_var;
     match (params.is_remote, field.attrs.getter()) {
         (false, None) => quote!(&#self_var.#member),
         (true, None) => {
             let inner = quote!(&#self_var.#member);
             let ty = field.ty;
             quote!(_serde::private::ser::constrain::<#ty>(#inner))
         }
         (true, Some(getter)) => {
@@ -1219,31 +1228,31 @@ fn get_member(params: &Parameters, field
 
 enum StructTrait {
     SerializeMap,
     SerializeStruct,
     SerializeStructVariant,
 }
 
 impl StructTrait {
-    fn serialize_field(&self, span: Span) -> Tokens {
+    fn serialize_field(&self, span: Span) -> TokenStream {
         match *self {
             StructTrait::SerializeMap => {
                 quote_spanned!(span=> _serde::ser::SerializeMap::serialize_entry)
             }
             StructTrait::SerializeStruct => {
                 quote_spanned!(span=> _serde::ser::SerializeStruct::serialize_field)
             }
             StructTrait::SerializeStructVariant => {
                 quote_spanned!(span=> _serde::ser::SerializeStructVariant::serialize_field)
             }
         }
     }
 
-    fn skip_field(&self, span: Span) -> Option<Tokens> {
+    fn skip_field(&self, span: Span) -> Option<TokenStream> {
         match *self {
             StructTrait::SerializeMap => None,
             StructTrait::SerializeStruct => {
                 Some(quote_spanned!(span=> _serde::ser::SerializeStruct::skip_field))
             }
             StructTrait::SerializeStructVariant => {
                 Some(quote_spanned!(span=> _serde::ser::SerializeStructVariant::skip_field))
             }
@@ -1253,17 +1262,17 @@ impl StructTrait {
 
 enum TupleTrait {
     SerializeTuple,
     SerializeTupleStruct,
     SerializeTupleVariant,
 }
 
 impl TupleTrait {
-    fn serialize_element(&self, span: Span) -> Tokens {
+    fn serialize_element(&self, span: Span) -> TokenStream {
         match *self {
             TupleTrait::SerializeTuple => {
                 quote_spanned!(span=> _serde::ser::SerializeTuple::serialize_element)
             }
             TupleTrait::SerializeTupleStruct => {
                 quote_spanned!(span=> _serde::ser::SerializeTupleStruct::serialize_field)
             }
             TupleTrait::SerializeTupleVariant => {
--- a/third_party/rust/serde_derive/src/try.rs
+++ b/third_party/rust/serde_derive/src/try.rs
@@ -1,18 +1,17 @@
-use proc_macro2::{Op, Spacing};
-use quote::Tokens;
+use proc_macro2::{Punct, Spacing, TokenStream};
 
 // None of our generated code requires the `From::from` error conversion
 // performed by the standard library's `try!` macro. With this simplified macro
 // we see a significant improvement in type checking and borrow checking time of
 // the generated code and a slight improvement in binary size.
-pub fn replacement() -> Tokens {
+pub fn replacement() -> TokenStream {
     // Cannot pass `$expr` to `quote!` prior to Rust 1.17.0 so interpolate it.
-    let dollar = Op::new('$', Spacing::Alone);
+    let dollar = Punct::new('$', Spacing::Alone);
 
     quote! {
         #[allow(unused_macros)]
         macro_rules! try {
             (#dollar __expr:expr) => {
                 match #dollar __expr {
                     _serde::export::Ok(__val) => __val,
                     _serde::export::Err(__err) => {
copy from third_party/rust/syn/.cargo-checksum.json
copy to third_party/rust/syn-0.13.1/.cargo-checksum.json
copy from third_party/rust/syn/Cargo.toml
copy to third_party/rust/syn-0.13.1/Cargo.toml
new file mode 100644
--- /dev/null
+++ b/third_party/rust/syn-0.13.1/LICENSE-APACHE
@@ -0,0 +1,201 @@
+                              Apache License
+                        Version 2.0, January 2004
+                     http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+   "License" shall mean the terms and conditions for use, reproduction,
+   and distribution as defined by Sections 1 through 9 of this document.
+
+   "Licensor" shall mean the copyright owner or entity authorized by
+   the copyright owner that is granting the License.
+
+   "Legal Entity" shall mean the union of the acting entity and all
+   other entities that control, are controlled by, or are under common
+   control with that entity. For the purposes of this definition,
+   "control" means (i) the power, direct or indirect, to cause the
+   direction or management of such entity, whether by contract or
+   otherwise, or (ii) ownership of fifty percent (50%) or more of the
+   outstanding shares, or (iii) beneficial ownership of such entity.
+
+   "You" (or "Your") shall mean an individual or Legal Entity
+   exercising permissions granted by this License.
+
+   "Source" form shall mean the preferred form for making modifications,
+   including but not limited to software source code, documentation
+   source, and configuration files.
+
+   "Object" form shall mean any form resulting from mechanical
+   transformation or translation of a Source form, including but
+   not limited to compiled object code, generated documentation,
+   and conversions to other media types.
+
+   "Work" shall mean the work of authorship, whether in Source or
+   Object form, made available under the License, as indicated by a
+   copyright notice that is included in or attached to the work
+   (an example is provided in the Appendix below).
+
+   "Derivative Works" shall mean any work, whether in Source or Object
+   form, that is based on (or derived from) the Work and for which the
+   editorial revisions, annotations, elaborations, or other modifications
+   represent, as a whole, an original work of authorship. For the purposes
+   of this License, Derivative Works shall not include works that remain
+   separable from, or merely link (or bind by name) to the interfaces of,
+   the Work and Derivative Works thereof.
+
+   "Contribution" shall mean any work of authorship, including
+   the original version of the Work and any modifications or additions
+   to that Work or Derivative Works thereof, that is intentionally
+   submitted to Licensor for inclusion in the Work by the copyright owner
+   or by an individual or Legal Entity authorized to submit on behalf of
+   the copyright owner. For the purposes of this definition, "submitted"
+   means any form of electronic, verbal, or written communication sent
+   to the Licensor or its representatives, including but not limited to
+   communication on electronic mailing lists, source code control systems,
+   and issue tracking systems that are managed by, or on behalf of, the
+   Licensor for the purpose of discussing and improving the Work, but
+   excluding communication that is conspicuously marked or otherwise
+   designated in writing by the copyright owner as "Not a Contribution."
+
+   "Contributor" shall mean Licensor and any individual or Legal Entity
+   on behalf of whom a Contribution has been received by Licensor and
+   subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   copyright license to reproduce, prepare Derivative Works of,
+   publicly display, publicly perform, sublicense, and distribute the
+   Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+   this License, each Contributor hereby grants to You a perpetual,
+   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+   (except as stated in this section) patent license to make, have made,
+   use, offer to sell, sell, import, and otherwise transfer the Work,
+   where such license applies only to those patent claims licensable
+   by such Contributor that are necessarily infringed by their
+   Contribution(s) alone or by combination of their Contribution(s)
+   with the Work to which such Contribution(s) was submitted. If You
+   institute patent litigation against any entity (including a
+   cross-claim or counterclaim in a lawsuit) alleging that the Work
+   or a Contribution incorporated within the Work constitutes direct
+   or contributory patent infringement, then any patent licenses
+   granted to You under this License for that Work shall terminate
+   as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+   Work or Derivative Works thereof in any medium, with or without
+   modifications, and in Source or Object form, provided that You
+   meet the following conditions:
+
+   (a) You must give any other recipients of the Work or
+       Derivative Works a copy of this License; and
+
+   (b) You must cause any modified files to carry prominent notices
+       stating that You changed the files; and
+
+   (c) You must retain, in the Source form of any Derivative Works
+       that You distribute, all copyright, patent, trademark, and
+       attribution notices from the Source form of the Work,
+       excluding those notices that do not pertain to any part of
+       the Derivative Works; and
+
+   (d) If the Work includes a "NOTICE" text file as part of its
+       distribution, then any Derivative Works that You distribute must
+       include a readable copy of the attribution notices contained
+       within such NOTICE file, excluding those notices that do not
+       pertain to any part of the Derivative Works, in at least one
+       of the following places: within a NOTICE text file distributed
+       as part of the Derivative Works; within the Source form or
+       documentation, if provided along with the Derivative Works; or,
+       within a display generated by the Derivative Works, if and
+       wherever such third-party notices normally appear. The contents
+       of the NOTICE file are for informational purposes only and
+       do not modify the License. You may add Your own attribution
+       notices within Derivative Works that You distribute, alongside
+       or as an addendum to the NOTICE text from the Work, provided
+       that such additional attribution notices cannot be construed
+       as modifying the License.
+
+   You may add Your own copyright statement to Your modifications and
+   may provide additional or different license terms and conditions
+   for use, reproduction, or distribution of Your modifications, or
+   for any such Derivative Works as a whole, provided Your use,
+   reproduction, and distribution of the Work otherwise complies with
+   the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+   any Contribution intentionally submitted for inclusion in the Work
+   by You to the Licensor shall be under the terms and conditions of
+   this License, without any additional terms or conditions.
+   Notwithstanding the above, nothing herein shall supersede or modify
+   the terms of any separate license agreement you may have executed
+   with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+   names, trademarks, service marks, or product names of the Licensor,
+   except as required for reasonable and customary use in describing the
+   origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+   agreed to in writing, Licensor provides the Work (and each
+   Contributor provides its Contributions) on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+   implied, including, without limitation, any warranties or conditions
+   of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+   PARTICULAR PURPOSE. You are solely responsible for determining the
+   appropriateness of using or redistributing the Work and assume any
+   risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+   whether in tort (including negligence), contract, or otherwise,
+   unless required by applicable law (such as deliberate and grossly
+   negligent acts) or agreed to in writing, shall any Contributor be
+   liable to You for damages, including any direct, indirect, special,
+   incidental, or consequential damages of any character arising as a
+   result of this License or out of the use or inability to use the
+   Work (including but not limited to damages for loss of goodwill,
+   work stoppage, computer failure or malfunction, or any and all
+   other commercial damages or losses), even if such Contributor
+   has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+   the Work or Derivative Works thereof, You may choose to offer,
+   and charge a fee for, acceptance of support, warranty, indemnity,
+   or other liability obligations and/or rights consistent with this
+   License. However, in accepting such obligations, You may act only
+   on Your own behalf and on Your sole responsibility, not on behalf
+   of any other Contributor, and only if You agree to indemnify,
+   defend, and hold each Contributor harmless for any liability
+   incurred by, or claims asserted against, such Contributor by reason
+   of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
+
+APPENDIX: How to apply the Apache License to your work.
+
+   To apply the Apache License to your work, attach the following
+   boilerplate notice, with the fields enclosed by brackets "[]"
+   replaced with your own identifying information. (Don't include
+   the brackets!)  The text should be enclosed in the appropriate
+   comment syntax for the file format. We also recommend that a
+   file or class name and description of purpose be included on the
+   same "printed page" as the copyright notice for easier
+   identification within third-party archives.
+
+Copyright [yyyy] [name of copyright owner]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+	http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
new file mode 100644
--- /dev/null
+++ b/third_party/rust/syn-0.13.1/LICENSE-MIT
@@ -0,0 +1,25 @@
+Copyright (c) 2018 Syn Developers
+
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
copy from third_party/rust/syn/README.md
copy to third_party/rust/syn-0.13.1/README.md
copy from third_party/rust/syn/src/attr.rs
copy to third_party/rust/syn-0.13.1/src/attr.rs
copy from third_party/rust/syn/src/buffer.rs
copy to third_party/rust/syn-0.13.1/src/buffer.rs
copy from third_party/rust/syn/src/data.rs
copy to third_party/rust/syn-0.13.1/src/data.rs
copy from third_party/rust/syn/src/derive.rs
copy to third_party/rust/syn-0.13.1/src/derive.rs
copy from third_party/rust/syn/src/error.rs
copy to third_party/rust/syn-0.13.1/src/error.rs
copy from third_party/rust/syn/src/expr.rs
copy to third_party/rust/syn-0.13.1/src/expr.rs
copy from third_party/rust/syn/src/file.rs
copy to third_party/rust/syn-0.13.1/src/file.rs
copy from third_party/rust/syn/src/gen/fold.rs
copy to third_party/rust/syn-0.13.1/src/gen/fold.rs
copy from third_party/rust/syn/src/gen/visit.rs
copy to third_party/rust/syn-0.13.1/src/gen/visit.rs
copy from third_party/rust/syn/src/gen/visit_mut.rs
copy to third_party/rust/syn-0.13.1/src/gen/visit_mut.rs
copy from third_party/rust/syn/src/gen_helper.rs
copy to third_party/rust/syn-0.13.1/src/gen_helper.rs
copy from third_party/rust/syn/src/generics.rs
copy to third_party/rust/syn-0.13.1/src/generics.rs
rename from third_party/rust/syn/src/ident.rs
rename to third_party/rust/syn-0.13.1/src/ident.rs
copy from third_party/rust/syn/src/item.rs
copy to third_party/rust/syn-0.13.1/src/item.rs
copy from third_party/rust/syn/src/lib.rs
copy to third_party/rust/syn-0.13.1/src/lib.rs
copy from third_party/rust/syn/src/lifetime.rs
copy to third_party/rust/syn-0.13.1/src/lifetime.rs
copy from third_party/rust/syn/src/lit.rs
copy to third_party/rust/syn-0.13.1/src/lit.rs
copy from third_party/rust/syn/src/mac.rs
copy to third_party/rust/syn-0.13.1/src/mac.rs
copy from third_party/rust/syn/src/macros.rs
copy to third_party/rust/syn-0.13.1/src/macros.rs
copy from third_party/rust/syn/src/op.rs
copy to third_party/rust/syn-0.13.1/src/op.rs
copy from third_party/rust/syn/src/parse_quote.rs
copy to third_party/rust/syn-0.13.1/src/parse_quote.rs
copy from third_party/rust/syn/src/parsers.rs
copy to third_party/rust/syn-0.13.1/src/parsers.rs
copy from third_party/rust/syn/src/path.rs
copy to third_party/rust/syn-0.13.1/src/path.rs
copy from third_party/rust/syn/src/punctuated.rs
copy to third_party/rust/syn-0.13.1/src/punctuated.rs
copy from third_party/rust/syn/src/spanned.rs
copy to third_party/rust/syn-0.13.1/src/spanned.rs
copy from third_party/rust/syn/src/synom.rs
copy to third_party/rust/syn-0.13.1/src/synom.rs
copy from third_party/rust/syn/src/token.rs
copy to third_party/rust/syn-0.13.1/src/token.rs
copy from third_party/rust/syn/src/tt.rs
copy to third_party/rust/syn-0.13.1/src/tt.rs
copy from third_party/rust/syn/src/ty.rs
copy to third_party/rust/syn-0.13.1/src/ty.rs
--- a/third_party/rust/syn/.cargo-checksum.json
+++ b/third_party/rust/syn/.cargo-checksum.json
@@ -1,1 +1,1 @@
-{"files":{"Cargo.toml":"6e30a2a0b1f654eb5baee4a35dd6acc7ba672a9dd390fcc96be14141f3c46957","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"f033c371492a4769d377a8bf1a139adc7bf94ea00595b867a3e234eeab994c8c","README.md":"95ff123a7ebc2968ad9eb5b3bb0f9f2d3d3dca15f32a28432088f9af763cf88f","src/attr.rs":"38e85d2c4e1d7bcf081297d10c6ad6adfd0b15c29285eb01ab71f50cbd19e3a5","src/buffer.rs":"15d2d687102a05dd09cfd1a4bd8c0bf9752dc865606070326ef001e536469233","src/data.rs":"d6514cb3c8f30f72c89abf7118903dfa35887a13e46969de9b170a9661f123a7","src/derive.rs":"eed8e88cd763e852782b10d2366a81e52d3ec0777676eaa94827ea3b46151134","src/error.rs":"db9f0648e7399cfcaef9c431b452b5012a6056e75c586212e2cfdb1e18b8c69a","src/expr.rs":"60d76c1828292b8eacd67dd3aed9d6fbdb696e31ed77390c7224b01bae6d51a4","src/file.rs":"43d5b12828a5c2a0bbef7baa56b0b9638575d73d84e0babaf0e85cdc2d573f16","src/gen/fold.rs":"7bcf7814e6d34b5b7420cb865643f955766448b7e045d16a5188f605d52d6203","src/gen/visit.rs":"a58990558ad865a7cbdd758c35641bd072409e59b743f9fd0fdfd68b995be4f2","src/gen/visit_mut.rs":"68a40b4c34c9968303b43db4bb1cbb2bc6fccf0a36156639f40c1dd2af4f23a2","src/gen_helper.rs":"2be46ff201fd53908350bde4d6c8b7dc427dbd156fa538869a9ccbdf6279af04","src/generics.rs":"2202ba51ba65532af42cbfe70be23a91fb134be433fa21ed129f862832a709a0","src/ident.rs":"8d20690df5778a100c43df0f02d8aec974ac63562e9c7928de0e1954ed83e2e5","src/item.rs":"a42b3316e56fdb7c5cdaf1a839e22a6e7aa29d7cc73e153144c75935fe336cb3","src/lib.rs":"59ee133804c9cccfa35448b421350e30b7a12437476920ddadac9e6664dcb5e7","src/lifetime.rs":"a893013506d23418fbbc1bffcc5d85066de4c5766d693b9451f4a609a70888ba","src/lit.rs":"7b6b4e23dd1a8151a2202bbdd1eda37d0844c065cbb40f4a385c51a627273a7a","src/mac.rs":"ec945e4926de028c153c87f1643e8910e53738d8a4b519b74254479e61acabec","src/macros.rs":"b975b110f9b904e5d82bd0222f7cd0398825fcde146c2b98b581daccf2eb8341","src/op.rs":"b9f9ff8027fc65403858d8fed7d0ac3c919d70f97f830fd7532fb22e1dea993b","src/parse
_quote.rs":"b0221332823786d664de3e4976cdd3962ca5fa3c1558bb57f9c8810348ded0b0","src/parsers.rs":"9ef1c5e7760a7a4132fe6834dda5871ff9b6513f51243008f710ee4fe973529d","src/path.rs":"01455551da12e8782b4c97ccb8c670d81ea1db1264393a73577535141d5266a8","src/punctuated.rs":"332277f8867f6ca306e645e440d3582c04e0926f7aa65661c51fb49579530f10","src/spanned.rs":"2624bfac35a6bbe7e628af362772e5cc8d98b9462ec9a8369cf459e431d7ecb4","src/synom.rs":"833d59768e2018706ebe95e3c89c5b6044c5f6306262fe4dc591d59d69058b84","src/token.rs":"af717ca5a0bc404b3351456daa532894697f0f7609f9c0e3d995eeef3f5ff9af","src/tt.rs":"48ec67f4bba2d35c4dd113bb3dc577d34866a72a778bc60bb5f1750ca9b67f0a","src/ty.rs":"9b61ed1b97175036af79eb64b4423c7def2769ed32dc17da16179c0c1393b054"},"package":"91b52877572087400e83d24b9178488541e3d535259e04ff17a63df1e5ceff59"}
\ No newline at end of file
+{"files":{"Cargo.toml":"63955e91c17acc7a7d106b5dc4bd20fb7631110e538185447981caffdedca987","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"f033c371492a4769d377a8bf1a139adc7bf94ea00595b867a3e234eeab994c8c","README.md":"43062927a3e318771d5c2218e5066147b10cdd51cdeb240b06352506cfb7a72c","src/attr.rs":"114556fe97520ee9ff4e8ec408348adbd51757694d353b563a75dcc944d0b050","src/buffer.rs":"d0a4fcf50a04f41cc57f98cfcfcd1efd766b1be18700a810211cfd469010b836","src/data.rs":"95a4e16441d6b31ccc9e31145817eb2a9d7eb49dc148f8005df3e38023950d01","src/derive.rs":"9c1754cd56e85def6ede0bb4d83ecb07862cc1d921e23fc8dad2721f3b29f7b7","src/error.rs":"aa215e0a8a98d85b970a9003565746be9fcdeb24a594b322e591d0ce74ed310b","src/expr.rs":"1ad8ffd9004cd8a9eb56d1309ef720c2d51cdc72ff87f5d182dcf3cbb5ebead0","src/file.rs":"b1ae5b48a1937c475d57de8630d0a99326021e22b107a428342cf8b824330fbc","src/gen/fold.rs":"92fa04ad74d03fe61d13a1c6355a34c36eb9c41e165c0c61746cc9a43b622f35","src/gen/visit.rs":"8a2b8ad57819720780f7bebfc8b53f07eaf631c7269c1553f599fcd412922d6e","src/gen/visit_mut.rs":"1e59e4fd8905beae948b806437edeae551e4907151fd328194e6d6b554cc3445","src/gen_helper.rs":"d128fbd24fadfc5634976bdb9188c649f9905718c9c987a2839c3e6134b155a2","src/generics.rs":"76561f4aa8e62fb77cc2a92addf14715544ede9094760ee3787c27f32c92bd49","src/item.rs":"7b18e67516e9bf25359a8800c313da0daca6fbec018b39e12d40a100a5e4ea6d","src/lib.rs":"f5f0fffd0f93c57131ac1d039a2ab04d10bfb97fef69756f5c5bd57b9ca9395e","src/lifetime.rs":"3291f69bdf452b80f6b283f297714484ed4611f71358ff7abb076c730d25c54b","src/lit.rs":"7ccdd679878c324ad47b0c29e76eb8d9e5d9b00e53cd3767060563bf7048eee9","src/mac.rs":"07c5aab9af440087315d58edeea733dda80bef0240463bf4d324c1ba1e2a272c","src/macros.rs":"e4670f3fb07888cb856f089b01f4775c310e897be2dc1334d8cb814c68f60911","src/op.rs":"7f20c79ac0ccc8cb129b550a305d8c57e58fdc2dd241f5fc1d1634450c26c32b","src/parse_quote.rs":"a3fbe5ad2cbeb967fbb8677f925431baa5303445a128c5284254b453284f21ab","src
/parsers.rs":"e251933df616be569bef92daa883331675df7d361714fb1dff2db48da6f25498","src/path.rs":"e120d6e1cae265635e77fdda0b413c99a24d26b059d35d583e30e97dafad49f9","src/punctuated.rs":"154e3a495faccd273cb4da04ed844c5b2fbd02459967a9e2b9e4431cbd4fbe02","src/spanned.rs":"70e8c7d953e76b1ce5330c73d7c0103c02a9b56e8dd8299c621fe0c91c615e23","src/synom.rs":"4aff07973b90930af0734f98d6137445fe461eac8fb97578c6bc5ccc168fddec","src/token.rs":"4e38f0200b5b69821834d8173ece7c7f55a0e5a52b4869724fb03db55acda82b","src/tt.rs":"e0541709fc8038a58156ef590f4449969a752aa96d7722beb41c097a83db5d21","src/ty.rs":"50dce498b1fb6aa1d4bd5274aaac9697bea3587bf7159499d1c218e811b1297a"},"package":"c67da57e61ebc7b7b6fff56bb34440ca3a83db037320b0507af4c10368deda7d"}
\ No newline at end of file
--- a/third_party/rust/syn/Cargo.toml
+++ b/third_party/rust/syn/Cargo.toml
@@ -7,48 +7,51 @@
 #
 # If you believe there's an error in this file please file an
 # issue against the rust-lang/cargo repository. If you're
 # editing this file be aware that the upstream Cargo.toml
 # will likely look very different (and much more reasonable)
 
 [package]
 name = "syn"
-version = "0.13.1"
+version = "0.14.2"
 authors = ["David Tolnay <dtolnay@gmail.com>"]
 include = ["/Cargo.toml", "/src/**/*.rs", "/README.md", "/LICENSE-APACHE", "/LICENSE-MIT"]
 description = "Nom parser for Rust source code"
 documentation = "https://docs.rs/syn"
 readme = "README.md"
 categories = ["development-tools::procedural-macro-helpers"]
 license = "MIT/Apache-2.0"
 repository = "https://github.com/dtolnay/syn"
 [package.metadata.docs.rs]
 all-features = true
 
+[package.metadata.playground]
+all-features = true
+
 [[example]]
 name = "dump-syntax"
 path = "examples/dump-syntax/main.rs"
 required-features = ["full", "parsing", "extra-traits"]
 [dependencies.proc-macro2]
-version = "0.3"
+version = "0.4.4"
 default-features = false
 
 [dependencies.quote]
-version = "0.5"
+version = "0.6"
 optional = true
 default-features = false
 
 [dependencies.unicode-xid]
 version = "0.1"
 [dev-dependencies.rayon]
-version = "1.0.0"
+version = "1.0"
 
 [dev-dependencies.walkdir]
-version = "2"
+version = "2.1"
 
 [features]
 clone-impls = []
 default = ["derive", "parsing", "printing", "clone-impls", "proc-macro"]
 derive = []
 extra-traits = []
 fold = []
 full = []
--- a/third_party/rust/syn/README.md
+++ b/third_party/rust/syn/README.md
@@ -1,14 +1,14 @@
 Nom parser for Rust source code
 ===============================
 
 [![Build Status](https://api.travis-ci.org/dtolnay/syn.svg?branch=master)](https://travis-ci.org/dtolnay/syn)
 [![Latest Version](https://img.shields.io/crates/v/syn.svg)](https://crates.io/crates/syn)
-[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/syn/0.13/syn/)
+[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/syn/0.14/syn/)
 [![Rustc Version 1.15+](https://img.shields.io/badge/rustc-1.15+-lightgray.svg)](https://blog.rust-lang.org/2017/02/02/Rust-1.15.html)
 
 Syn is a parsing library for parsing a stream of Rust tokens into a syntax tree
 of Rust source code.
 
 Currently this library is geared toward the [custom derive] use case but
 contains some APIs that may be useful for Rust procedural macros more generally.
 
@@ -37,46 +37,48 @@ contains some APIs that may be useful fo
   token. These spans allow a procedural macro to display detailed error messages
   pointing to all the right places in the user's code. There is an example of
   this below.
 
 - **Feature flags** — Functionality is aggressively feature gated so your
   procedural macros enable only what they need, and do not pay in compile time
   for all the rest.
 
-[`syn::File`]: https://docs.rs/syn/0.13/syn/struct.File.html
-[`syn::Item`]: https://docs.rs/syn/0.13/syn/enum.Item.html
-[`syn::Expr`]: https://docs.rs/syn/0.13/syn/enum.Expr.html
-[`syn::Type`]: https://docs.rs/syn/0.13/syn/enum.Type.html
-[`syn::DeriveInput`]: https://docs.rs/syn/0.13/syn/struct.DeriveInput.html
+[`syn::File`]: https://docs.rs/syn/0.14/syn/struct.File.html
+[`syn::Item`]: https://docs.rs/syn/0.14/syn/enum.Item.html
+[`syn::Expr`]: https://docs.rs/syn/0.14/syn/enum.Expr.html
+[`syn::Type`]: https://docs.rs/syn/0.14/syn/enum.Type.html
+[`syn::DeriveInput`]: https://docs.rs/syn/0.14/syn/struct.DeriveInput.html
 
 If you get stuck with anything involving procedural macros in Rust I am happy to
 provide help even if the issue is not related to Syn. Please file a ticket in
 this repo.
 
 *Version requirement: Syn supports any compiler version back to Rust's very
 first support for procedural macros in Rust 1.15.0. Some features especially
 around error reporting are only available in newer compilers or on the nightly
 channel.*
 
+[*Release notes*](https://github.com/dtolnay/syn/releases)
+
 ## Example of a custom derive
 
 The canonical custom derive using Syn looks like this. We write an ordinary Rust
 function tagged with a `proc_macro_derive` attribute and the name of the trait
 we are deriving. Any time that derive appears in the user's code, the Rust
 compiler passes their data structure as tokens into our macro. We get to execute
 arbitrary Rust code to figure out what to do with those tokens, then hand some
 tokens back to the compiler to compile into the user's crate.
 
 [`TokenStream`]: https://doc.rust-lang.org/proc_macro/struct.TokenStream.html
 
 ```toml
 [dependencies]
-syn = "0.13"
-quote = "0.5"
+syn = "0.14"
+quote = "0.6"
 
 [lib]
 proc-macro = true
 ```
 
 ```rust
 extern crate proc_macro;
 extern crate syn;
@@ -237,34 +239,36 @@ available.
   tokens of Rust source code.
 - **`visit`** — Trait for traversing a syntax tree.
 - **`visit-mut`** — Trait for traversing and mutating in place a syntax tree.
 - **`fold`** — Trait for transforming an owned syntax tree.
 - **`clone-impls`** *(enabled by default)* — Clone impls for all syntax tree
   types.
 - **`extra-traits`** — Debug, Eq, PartialEq, Hash impls for all syntax tree
   types.
+- **`proc-macro`** *(enabled by default)* — Runtime dependency on the dynamic
+  library libproc_macro from rustc toolchain.
 
 ## Nightly features
 
 By default Syn uses the [`proc-macro2`] crate to emulate the nightly compiler's
 procedural macro API in a stable way that works all the way back to Rust 1.15.0.
 This shim makes it possible to write code without regard for whether the current
 compiler version supports the features we use.
 
 [`proc-macro2`]: https://github.com/alexcrichton/proc-macro2
 
 On a nightly compiler, to eliminate the stable shim and use the compiler's
 `proc-macro` directly, add `proc-macro2` to your Cargo.toml and set its
 `"nightly"` feature which bypasses the stable shim.
 
 ```toml
 [dependencies]
-syn = "0.13"
-proc-macro2 = { version = "0.3", features = ["nightly"] }
+syn = "0.14"
+proc-macro2 = { version = "0.4", features = ["nightly"] }
 ```
 
 ## License
 
 Licensed under either of
 
  * Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
  * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
--- a/third_party/rust/syn/src/attr.rs
+++ b/third_party/rust/syn/src/attr.rs
@@ -69,18 +69,20 @@ ast_struct! {
 }
 
 #[cfg(feature = "extra-traits")]
 impl Eq for Attribute {}
 
 #[cfg(feature = "extra-traits")]
 impl PartialEq for Attribute {
     fn eq(&self, other: &Self) -> bool {
-        self.style == other.style && self.pound_token == other.pound_token
-            && self.bracket_token == other.bracket_token && self.path == other.path
+        self.style == other.style
+            && self.pound_token == other.pound_token
+            && self.bracket_token == other.bracket_token
+            && self.path == other.path
             && TokenStreamHelper(&self.tts) == TokenStreamHelper(&other.tts)
             && self.is_sugared_doc == other.is_sugared_doc
     }
 }
 
 #[cfg(feature = "extra-traits")]
 impl Hash for Attribute {
     fn hash<H>(&self, state: &mut H)
@@ -102,91 +104,87 @@ impl Attribute {
     pub fn interpret_meta(&self) -> Option<Meta> {
         let name = if self.path.segments.len() == 1 {
             &self.path.segments.first().unwrap().value().ident
         } else {
             return None;
         };
 
         if self.tts.is_empty() {
-            return Some(Meta::Word(*name));
+            return Some(Meta::Word(name.clone()));
         }
 
         let tts = self.tts.clone().into_iter().collect::<Vec<_>>();
 
         if tts.len() == 1 {
-            if let Some(meta) = Attribute::extract_meta_list(*name, &tts[0]) {
+            if let Some(meta) = Attribute::extract_meta_list(name.clone(), &tts[0]) {
                 return Some(meta);
             }
         }
 
         if tts.len() == 2 {
-            if let Some(meta) = Attribute::extract_name_value(*name, &tts[0], &tts[1]) {
+            if let Some(meta) = Attribute::extract_name_value(name.clone(), &tts[0], &tts[1]) {
                 return Some(meta);
             }
         }
 
         None
     }
 
     fn extract_meta_list(ident: Ident, tt: &TokenTree) -> Option<Meta> {
         let g = match *tt {
             TokenTree::Group(ref g) => g,
             _ => return None,
         };
         if g.delimiter() != Delimiter::Parenthesis {
-            return None
+            return None;
         }
         let tokens = g.stream().clone().into_iter().collect::<Vec<_>>();
         let nested = match list_of_nested_meta_items_from_tokens(&tokens) {
             Some(n) => n,
             None => return None,
         };
         Some(Meta::List(MetaList {
             paren_token: token::Paren(g.span()),
             ident: ident,
             nested: nested,
         }))
     }
 
     fn extract_name_value(ident: Ident, a: &TokenTree, b: &TokenTree) -> Option<Meta> {
         let a = match *a {
-            TokenTree::Op(ref o) => o,
+            TokenTree::Punct(ref o) => o,
             _ => return None,
         };
         if a.spacing() != Spacing::Alone {
-            return None
+            return None;
         }
-        if a.op() != '=' {
-            return None
+        if a.as_char() != '=' {
+            return None;
         }
 
         match *b {
             TokenTree::Literal(ref l) if !l.to_string().starts_with('/') => {
                 Some(Meta::NameValue(MetaNameValue {
                     ident: ident,
                     eq_token: Token![=]([a.span()]),
                     lit: Lit::new(l.clone()),
                 }))
             }
-            TokenTree::Term(ref term) => {
-                match term.as_str() {
-                    v @ "true" | v @ "false" => {
-                        Some(Meta::NameValue(MetaNameValue {
-                            ident: ident,
-                            eq_token: Token![=]([a.span()]),
-                            lit: Lit::Bool(LitBool {
-                                value: v == "true",
-                                span: b.span(),
-                            }),
-                        }))
-                    },
-                    _ => None ,
-                }
-            }
+            TokenTree::Ident(ref v) => match &v.to_string()[..] {
+                v @ "true" | v @ "false" => Some(Meta::NameValue(MetaNameValue {
+                    ident: ident,
+                    eq_token: Token![=]([a.span()]),
+                    lit: Lit::Bool(LitBool {
+                        value: v == "true",
+                        span: b.span(),
+                    }),
+                })),
+                _ => None,
+            },
             _ => None,
         }
     }
 }
 
 fn nested_meta_item_from_tokens(tts: &[TokenTree]) -> Option<(NestedMeta, &[TokenTree])> {
     assert!(!tts.is_empty());
 
@@ -195,53 +193,52 @@ fn nested_meta_item_from_tokens(tts: &[T
             if lit.to_string().starts_with('/') {
                 None
             } else {
                 let lit = Lit::new(lit.clone());
                 Some((NestedMeta::Literal(lit), &tts[1..]))
             }
         }
 
-        TokenTree::Term(sym) => {
-            let ident = Ident::new(sym.as_str(), sym.span());
+        TokenTree::Ident(ref ident) => {
             if tts.len() >= 3 {
-                if let Some(meta) = Attribute::extract_name_value(ident, &tts[1], &tts[2]) {
-                    return Some((NestedMeta::Meta(meta), &tts[3..]))
+                if let Some(meta) = Attribute::extract_name_value(ident.clone(), &tts[1], &tts[2]) {
+                    return Some((NestedMeta::Meta(meta), &tts[3..]));
                 }
             }
 
             if tts.len() >= 2 {
-                if let Some(meta) = Attribute::extract_meta_list(ident, &tts[1]) {
-                    return Some((NestedMeta::Meta(meta), &tts[2..]))
+                if let Some(meta) = Attribute::extract_meta_list(ident.clone(), &tts[1]) {
+                    return Some((NestedMeta::Meta(meta), &tts[2..]));
                 }
             }
 
-            Some((Meta::Word(ident).into(), &tts[1..]))
+            Some((Meta::Word(ident.clone()).into(), &tts[1..]))
         }
 
         _ => None,
     }
 }
 
 fn list_of_nested_meta_items_from_tokens(
     mut tts: &[TokenTree],
 ) -> Option<Punctuated<NestedMeta, Token![,]>> {
     let mut nested_meta_items = Punctuated::new();
     let mut first = true;
 
     while !tts.is_empty() {
         let prev_comma = if first {
             first = false;
             None
-        } else if let TokenTree::Op(ref op) = tts[0] {
+        } else if let TokenTree::Punct(ref op) = tts[0] {
             if op.spacing() != Spacing::Alone {
-                return None
+                return None;
             }
-            if op.op() != ',' {
-                return None
+            if op.as_char() != ',' {
+                return None;
             }
             let tok = Token![,]([op.span()]);
             tts = &tts[1..];
             if tts.is_empty() {
                 break;
             }
             Some(tok)
         } else {
@@ -335,19 +332,19 @@ ast_enum_of_structs! {
 
 impl Meta {
     /// Returns the identifier that begins this structured meta item.
     ///
     /// For example this would return the `test` in `#[test]`, the `derive` in
     /// `#[derive(Copy)]`, and the `path` in `#[path = "sys/windows.rs"]`.
     pub fn name(&self) -> Ident {
         match *self {
-            Meta::Word(ref meta) => *meta,
-            Meta::List(ref meta) => meta.ident,
-            Meta::NameValue(ref meta) => meta.ident,
+            Meta::Word(ref meta) => meta.clone(),
+            Meta::List(ref meta) => meta.ident.clone(),
+            Meta::NameValue(ref meta) => meta.ident.clone(),
         }
     }
 }
 
 ast_enum_of_structs! {
     /// Element of a compile-time attribute list.
     ///
     /// *This type is available if Syn is built with the `"derive"` or `"full"`
@@ -396,21 +393,21 @@ where
     }
 }
 
 #[cfg(feature = "parsing")]
 pub mod parsing {
     use super::*;
     use buffer::Cursor;
     use parse_error;
+    use proc_macro2::{Literal, Punct, Spacing, Span, TokenTree};
     use synom::PResult;
-    use proc_macro2::{Literal, Spacing, Span, TokenTree, Op};
 
     fn eq(span: Span) -> TokenTree {
-        let mut op = Op::new('=', Spacing::Alone);
+        let mut op = Punct::new('=', Spacing::Alone);
         op.set_span(span);
         op.into()
     }
 
     impl Attribute {
         named!(pub parse_inner -> Self, alt!(
             do_parse!(
                 pound: punct!(#) >>
@@ -517,40 +514,41 @@ pub mod parsing {
             _ => parse_error(),
         }
     }
 }
 
 #[cfg(feature = "printing")]
 mod printing {
     use super::*;
-    use quote::{ToTokens, Tokens};
+    use proc_macro2::TokenStream;
+    use quote::ToTokens;
 
     impl ToTokens for Attribute {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.pound_token.to_tokens(tokens);
             if let AttrStyle::Inner(ref b) = self.style {
                 b.to_tokens(tokens);
             }
             self.bracket_token.surround(tokens, |tokens| {
                 self.path.to_tokens(tokens);
                 self.tts.to_tokens(tokens);
             });
         }
     }
 
     impl ToTokens for MetaList {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.ident.to_tokens(tokens);
             self.paren_token.surround(tokens, |tokens| {
                 self.nested.to_tokens(tokens);
             })
         }
     }
 
     impl ToTokens for MetaNameValue {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.ident.to_tokens(tokens);
             self.eq_token.to_tokens(tokens);
             self.lit.to_tokens(tokens);
         }
     }
 }
--- a/third_party/rust/syn/src/buffer.rs
+++ b/third_party/rust/syn/src/buffer.rs
@@ -124,32 +124,32 @@
 //! ```
 
 // This module is heavily commented as it contains the only unsafe code in Syn,
 // and caution should be used when editing it. The public-facing interface is
 // 100% safe but the implementation is fragile internally.
 
 #[cfg(feature = "proc-macro")]
 use proc_macro as pm;
-use proc_macro2::{Delimiter, Literal, Span, Term, TokenStream};
-use proc_macro2::{Group, TokenTree, Op};
+use proc_macro2::{Delimiter, Ident, Literal, Span, TokenStream};
+use proc_macro2::{Group, Punct, TokenTree};
 
+use std::marker::PhantomData;
 use std::ptr;
-use std::marker::PhantomData;
 
 #[cfg(synom_verbose_trace)]
 use std::fmt::{self, Debug};
 
 /// Internal type which is used instead of `TokenTree` to represent a token tree
 /// within a `TokenBuffer`.
 enum Entry {
     // Mimicking types from proc-macro.
     Group(Span, Delimiter, TokenBuffer),
-    Term(Term),
-    Op(Op),
+    Ident(Ident),
+    Punct(Punct),
     Literal(Literal),
     // End entries contain a raw pointer to the entry from the containing
     // token tree, or null if this is the outermost level.
     End(*const Entry),
 }
 
 /// A buffer that can be efficiently traversed multiple times, unlike
 /// `TokenStream` which requires a deep copy in order to traverse more than
@@ -172,21 +172,21 @@ impl TokenBuffer {
     // RETURNS, THE ADDRESS OF ITS BACKING MEMORY MUST REMAIN STABLE.
     fn inner_new(stream: TokenStream, up: *const Entry) -> TokenBuffer {
         // Build up the entries list, recording the locations of any Groups
         // in the list to be processed later.
         let mut entries = Vec::new();
         let mut seqs = Vec::new();
         for tt in stream {
             match tt {
-                TokenTree::Term(sym) => {
-                    entries.push(Entry::Term(sym));
+                TokenTree::Ident(sym) => {
+                    entries.push(Entry::Ident(sym));
                 }
-                TokenTree::Op(op) => {
-                    entries.push(Entry::Op(op));
+                TokenTree::Punct(op) => {
+                    entries.push(Entry::Punct(op));
                 }
                 TokenTree::Literal(l) => {
                     entries.push(Entry::Literal(l));
                 }
                 TokenTree::Group(g) => {
                     // Record the index of the interesting entry, and store an
                     // `End(null)` there temporarially.
                     seqs.push((entries.len(), g.span(), g.delimiter(), g.stream().clone()));
@@ -215,16 +215,19 @@ impl TokenBuffer {
             entries[idx] = Entry::Group(span, delim, inner);
         }
 
         TokenBuffer { data: entries }
     }
 
     /// Creates a `TokenBuffer` containing all the tokens from the input
     /// `TokenStream`.
+    ///
+    /// *This method is available if Syn is built with both the `"parsing"` and
+    /// `"proc-macro"` features.*
     #[cfg(feature = "proc-macro")]
     pub fn new(stream: pm::TokenStream) -> TokenBuffer {
         Self::new2(stream.into())
     }
 
     /// Creates a `TokenBuffer` containing all the tokens from the input
     /// `TokenStream`.
     pub fn new2(stream: TokenStream) -> TokenBuffer {
@@ -267,18 +270,18 @@ pub struct Cursor<'a> {
     marker: PhantomData<&'a Entry>,
 }
 
 impl<'a> Cursor<'a> {
     /// Creates a cursor referencing a static empty TokenStream.
     pub fn empty() -> Self {
         // It's safe in this situation for us to put an `Entry` object in global
         // storage, despite it not actually being safe to send across threads
-        // (`Term` is a reference into a thread-local table). This is because
-        // this entry never includes a `Term` object.
+        // (`Ident` is a reference into a thread-local table). This is because
+        // this entry never includes a `Ident` object.
         //
         // This wrapper struct allows us to break the rules and put a `Sync`
         // object in global storage.
         struct UnsafeSyncEntry(Entry);
         unsafe impl Sync for UnsafeSyncEntry {}
         static EMPTY_ENTRY: UnsafeSyncEntry = UnsafeSyncEntry(Entry::End(0 as *const Entry));
 
         Cursor {
@@ -360,32 +363,32 @@ impl<'a> Cursor<'a> {
             if group_delim == delim {
                 return Some((buf.begin(), span, unsafe { self.bump() }));
             }
         }
 
         None
     }
 
-    /// If the cursor is pointing at a `Term`, returns it along with a cursor
+    /// If the cursor is pointing at a `Ident`, returns it along with a cursor
     /// pointing at the next `TokenTree`.
-    pub fn term(mut self) -> Option<(Term, Cursor<'a>)> {
+    pub fn ident(mut self) -> Option<(Ident, Cursor<'a>)> {
         self.ignore_none();
         match *self.entry() {
-            Entry::Term(term) => Some((term, unsafe { self.bump() })),
+            Entry::Ident(ref ident) => Some((ident.clone(), unsafe { self.bump() })),
             _ => None,
         }
     }
 
-    /// If the cursor is pointing at an `Op`, returns it along with a cursor
+    /// If the cursor is pointing at an `Punct`, returns it along with a cursor
     /// pointing at the next `TokenTree`.
-    pub fn op(mut self) -> Option<(Op, Cursor<'a>)> {
+    pub fn punct(mut self) -> Option<(Punct, Cursor<'a>)> {
         self.ignore_none();
         match *self.entry() {
-            Entry::Op(op) => Some((op, unsafe { self.bump() })),
+            Entry::Punct(ref op) => Some((op.clone(), unsafe { self.bump() })),
             _ => None,
         }
     }
 
     /// If the cursor is pointing at a `Literal`, return it along with a cursor
     /// pointing at the next `TokenTree`.
     pub fn literal(mut self) -> Option<(Literal, Cursor<'a>)> {
         self.ignore_none();
@@ -418,34 +421,34 @@ impl<'a> Cursor<'a> {
         let tree = match *self.entry() {
             Entry::Group(span, delim, ref buf) => {
                 let stream = buf.begin().token_stream();
                 let mut g = Group::new(delim, stream);
                 g.set_span(span);
                 TokenTree::from(g)
             }
             Entry::Literal(ref lit) => lit.clone().into(),
-            Entry::Term(term) => term.into(),
-            Entry::Op(op) => op.into(),
+            Entry::Ident(ref ident) => ident.clone().into(),
+            Entry::Punct(ref op) => op.clone().into(),
             Entry::End(..) => {
                 return None;
             }
         };
 
         Some((tree, unsafe { self.bump() }))
     }
 
     /// Returns the `Span` of the current token, or `Span::call_site()` if this
     /// cursor points to eof.
     pub fn span(self) -> Span {
         match *self.entry() {
             Entry::Group(span, ..) => span,
             Entry::Literal(ref l) => l.span(),
-            Entry::Term(t) => t.span(),
-            Entry::Op(o) => o.span(),
+            Entry::Ident(ref t) => t.span(),
+            Entry::Punct(ref o) => o.span(),
             Entry::End(..) => Span::call_site(),
         }
     }
 }
 
 // We do a custom implementation for `Debug` as the default implementation is
 // pretty useless.
 #[cfg(synom_verbose_trace)]
--- a/third_party/rust/syn/src/data.rs
+++ b/third_party/rust/syn/src/data.rs
@@ -317,72 +317,73 @@ pub mod parsing {
             Some("visibility qualifier such as `pub`")
         }
     }
 }
 
 #[cfg(feature = "printing")]
 mod printing {
     use super::*;
-    use quote::{ToTokens, Tokens};
+    use proc_macro2::TokenStream;
+    use quote::{ToTokens, TokenStreamExt};
 
     impl ToTokens for Variant {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(&self.attrs);
             self.ident.to_tokens(tokens);
             self.fields.to_tokens(tokens);
             if let Some((ref eq_token, ref disc)) = self.discriminant {
                 eq_token.to_tokens(tokens);
                 disc.to_tokens(tokens);
             }
         }
     }
 
     impl ToTokens for FieldsNamed {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.brace_token.surround(tokens, |tokens| {
                 self.named.to_tokens(tokens);
             });
         }
     }
 
     impl ToTokens for FieldsUnnamed {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.paren_token.surround(tokens, |tokens| {
                 self.unnamed.to_tokens(tokens);
             });
         }
     }
 
     impl ToTokens for Field {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(&self.attrs);
             self.vis.to_tokens(tokens);
             if let Some(ref ident) = self.ident {
                 ident.to_tokens(tokens);
                 TokensOrDefault(&self.colon_token).to_tokens(tokens);
             }
             self.ty.to_tokens(tokens);
         }
     }
 
     impl ToTokens for VisPublic {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.pub_token.to_tokens(tokens)
         }
     }
 
     impl ToTokens for VisCrate {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.crate_token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for VisRestricted {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.pub_token.to_tokens(tokens);
             self.paren_token.surround(tokens, |tokens| {
                 // XXX: If we have a path which is not "self" or "super" or
                 // "crate", automatically add the "in" token.
                 self.in_token.to_tokens(tokens);
                 self.path.to_tokens(tokens);
             });
         }
--- a/third_party/rust/syn/src/derive.rs
+++ b/third_party/rust/syn/src/derive.rs
@@ -158,20 +158,21 @@ pub mod parsing {
         (wh, data.0, data.1)
     ));
 }
 
 #[cfg(feature = "printing")]
 mod printing {
     use super::*;
     use attr::FilterAttrs;
-    use quote::{ToTokens, Tokens};
+    use proc_macro2::TokenStream;
+    use quote::ToTokens;
 
     impl ToTokens for DeriveInput {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             for attr in self.attrs.outer() {
                 attr.to_tokens(tokens);
             }
             self.vis.to_tokens(tokens);
             match self.data {
                 Data::Struct(ref d) => d.struct_token.to_tokens(tokens),
                 Data::Enum(ref d) => d.enum_token.to_tokens(tokens),
                 Data::Union(ref d) => d.union_token.to_tokens(tokens),
--- a/third_party/rust/syn/src/error.rs
+++ b/third_party/rust/syn/src/error.rs
@@ -1,33 +1,33 @@
 // Copyright 2018 Syn Developers
 //
 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+use buffer::Cursor;
 use std::error::Error;
-use buffer::Cursor;
 use std::fmt::{self, Display};
 
 /// The result of a `Synom` parser.
 ///
 /// Refer to the [module documentation] for details about parsing in Syn.
 ///
 /// [module documentation]: index.html
 ///
 /// *This type is available if Syn is built with the `"parsing"` feature.*
 pub type PResult<'a, O> = Result<(O, Cursor<'a>), ParseError>;
 
 /// An error with a default error message.
 ///
 /// NOTE: We should provide better error messages in the future.
-pub fn parse_error<O>() -> PResult<'static, O> {
+pub fn parse_error<'a, O>() -> PResult<'a, O> {
     Err(ParseError(None))
 }
 
 /// Error returned when a `Synom` parser cannot parse the input tokens.
 ///
 /// Refer to the [module documentation] for details about parsing in Syn.
 ///
 /// [module documentation]: index.html
--- a/third_party/rust/syn/src/expr.rs
+++ b/third_party/rust/syn/src/expr.rs
@@ -2,24 +2,24 @@
 //
 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
 use super::*;
+use proc_macro2::{Span, TokenStream};
 use punctuated::Punctuated;
-use proc_macro2::{Span, TokenStream};
 #[cfg(feature = "extra-traits")]
 use std::hash::{Hash, Hasher};
+#[cfg(feature = "full")]
+use std::mem;
 #[cfg(feature = "extra-traits")]
 use tt::TokenStreamHelper;
-#[cfg(feature = "full")]
-use std::mem;
 
 ast_enum_of_structs! {
     /// A Rust expression.
     ///
     /// *This type is available if Syn is built with the `"derive"` or `"full"`
     /// feature.*
     ///
     /// # Syntax tree enums
@@ -632,17 +632,17 @@ ast_struct! {
     pub struct Index #manual_extra_traits {
         pub index: u32,
         pub span: Span,
     }
 }
 
 impl From<usize> for Index {
     fn from(index: usize) -> Index {
-        assert!(index < std::u32::MAX as usize);
+        assert!(index < u32::max_value() as usize);
         Index {
             index: index as u32,
             span: Span::call_site(),
         }
     }
 }
 
 #[cfg(feature = "extra-traits")]
@@ -1015,23 +1015,23 @@ fn arm_expr_requires_comma(expr: &Expr) 
 
 #[cfg(feature = "parsing")]
 pub mod parsing {
     use super::*;
     use path::parsing::qpath;
     #[cfg(feature = "full")]
     use path::parsing::ty_no_eq_after;
 
-    #[cfg(feature = "full")]
-    use proc_macro2::TokenStream;
-    use synom::Synom;
     use buffer::Cursor;
     #[cfg(feature = "full")]
     use parse_error;
+    #[cfg(feature = "full")]
+    use proc_macro2::TokenStream;
     use synom::PResult;
+    use synom::Synom;
 
     // When we're parsing expressions which occur before blocks, like in an if
     // statement's condition, we cannot parse a struct literal.
     //
     // Struct literals are ambiguous in certain positions
     // https://github.com/rust-lang/rfcs/pull/92
     macro_rules! ambiguous_expr {
         ($i:expr, $allow_struct:ident) => {
@@ -1586,39 +1586,46 @@ pub mod parsing {
         syn!(ExprLit) => { Expr::Lit }
         |
         syn!(ExprParen) => { Expr::Paren }
         |
         syn!(ExprPath) => { Expr::Path }
     ));
 
     #[cfg(feature = "full")]
-    named!(expr_nosemi -> Expr, map!(alt!(
-        syn!(ExprIf) => { Expr::If }
-        |
-        syn!(ExprIfLet) => { Expr::IfLet }
-        |
-        syn!(ExprWhile) => { Expr::While }
-        |
-        syn!(ExprWhileLet) => { Expr::WhileLet }
-        |
-        syn!(ExprForLoop) => { Expr::ForLoop }
-        |
-        syn!(ExprLoop) => { Expr::Loop }
-        |
-        syn!(ExprMatch) => { Expr::Match }
-        |
-        syn!(ExprCatch) => { Expr::Catch }
-        |
-        syn!(ExprYield) => { Expr::Yield }
-        |
-        syn!(ExprUnsafe) => { Expr::Unsafe }
-        |
-        syn!(ExprBlock) => { Expr::Block }
-    ), Expr::from));
+    named!(expr_nosemi -> Expr, do_parse!(
+        nosemi: alt!(
+            syn!(ExprIf) => { Expr::If }
+            |
+            syn!(ExprIfLet) => { Expr::IfLet }
+            |
+            syn!(ExprWhile) => { Expr::While }
+            |
+            syn!(ExprWhileLet) => { Expr::WhileLet }
+            |
+            syn!(ExprForLoop) => { Expr::ForLoop }
+            |
+            syn!(ExprLoop) => { Expr::Loop }
+            |
+            syn!(ExprMatch) => { Expr::Match }
+            |
+            syn!(ExprCatch) => { Expr::Catch }
+            |
+            syn!(ExprYield) => { Expr::Yield }
+            |
+            syn!(ExprUnsafe) => { Expr::Unsafe }
+            |
+            syn!(ExprBlock) => { Expr::Block }
+        ) >>
+        // If the next token is a `.` or a `?` it is special-cased to parse
+        // as an expression instead of a block expression.
+        not!(punct!(.)) >>
+        not!(punct!(?)) >>
+        (nosemi)
+    ));
 
     impl Synom for ExprLit {
         named!(parse -> Self, do_parse!(
             lit: syn!(Lit) >>
             (ExprLit {
                 attrs: Vec::new(),
                 lit: lit,
             })
@@ -1705,17 +1712,17 @@ pub mod parsing {
             punct!(>)
         )) >>
         args: parens!(Punctuated::parse_terminated) >>
         ({
             ExprMethodCall {
                 attrs: Vec::new(),
                 // this expr will get overwritten after being returned
                 receiver: Box::new(Expr::Verbatim(ExprVerbatim {
-                    tts: TokenStream::empty(),
+                    tts: TokenStream::new(),
                 })),
 
                 method: method,
                 turbofish: turbofish.map(|fish| MethodTurbofish {
                     colon2_token: fish.0,
                     lt_token: fish.1,
                     args: fish.2,
                     gt_token: fish.3,
@@ -2166,17 +2173,17 @@ pub mod parsing {
     #[cfg(feature = "full")]
     impl Synom for FieldValue {
         named!(parse -> Self, do_parse!(
             attrs: many0!(Attribute::parse_outer) >>
             field_value: alt!(
                 tuple!(syn!(Member), map!(punct!(:), Some), syn!(Expr))
                 |
                 map!(syn!(Ident), |name| (
-                    Member::Named(name),
+                    Member::Named(name.clone()),
                     None,
                     Expr::Path(ExprPath {
                         attrs: Vec::new(),
                         qself: None,
                         path: name.into(),
                     }),
                 ))
             ) >>
@@ -2400,20 +2407,16 @@ pub mod parsing {
 
     #[cfg(feature = "full")]
     named!(stmt_item -> Stmt, map!(syn!(Item), |i| Stmt::Item(i)));
 
     #[cfg(feature = "full")]
     named!(stmt_blockexpr -> Stmt, do_parse!(
         attrs: many0!(Attribute::parse_outer) >>
         mut e: expr_nosemi >>
-        // If the next token is a `.` or a `?` it is special-cased to parse as
-        // an expression instead of a blockexpression.
-        not!(punct!(.)) >>
-        not!(punct!(?)) >>
         semi: option!(punct!(;)) >>
         ({
             e.replace_attrs(attrs);
             if let Some(semi) = semi {
                 Stmt::Semi(e, semi)
             } else {
                 Stmt::Expr(e)
             }
@@ -2575,17 +2578,17 @@ pub mod parsing {
                 boxed: option!(keyword!(box)) >>
                 by_ref: option!(keyword!(ref)) >>
                 mutability: option!(keyword!(mut)) >>
                 ident: syn!(Ident) >>
                 ({
                     let mut pat: Pat = PatIdent {
                         by_ref: by_ref,
                         mutability: mutability,
-                        ident: ident,
+                        ident: ident.clone(),
                         subpat: None,
                     }.into();
                     if let Some(boxed) = boxed {
                         pat = PatBox {
                             pat: Box::new(pat),
                             box_token: boxed,
                         }.into();
                     }
@@ -2813,173 +2816,173 @@ pub mod parsing {
     }
 }
 
 #[cfg(feature = "printing")]
 mod printing {
     use super::*;
     #[cfg(feature = "full")]
     use attr::FilterAttrs;
-    use quote::{ToTokens, Tokens};
-    use proc_macro2::Literal;
+    use proc_macro2::{Literal, TokenStream};
+    use quote::{ToTokens, TokenStreamExt};
 
     // If the given expression is a bare `ExprStruct`, wraps it in parenthesis
-    // before appending it to `Tokens`.
+    // before appending it to `TokenStream`.
     #[cfg(feature = "full")]
-    fn wrap_bare_struct(tokens: &mut Tokens, e: &Expr) {
+    fn wrap_bare_struct(tokens: &mut TokenStream, e: &Expr) {
         if let Expr::Struct(_) = *e {
             token::Paren::default().surround(tokens, |tokens| {
                 e.to_tokens(tokens);
             });
         } else {
             e.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
-    fn attrs_to_tokens(attrs: &[Attribute], tokens: &mut Tokens) {
+    fn attrs_to_tokens(attrs: &[Attribute], tokens: &mut TokenStream) {
         tokens.append_all(attrs.outer());
     }
 
     #[cfg(not(feature = "full"))]
-    fn attrs_to_tokens(_attrs: &[Attribute], _tokens: &mut Tokens) {}
+    fn attrs_to_tokens(_attrs: &[Attribute], _tokens: &mut TokenStream) {}
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprBox {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.box_token.to_tokens(tokens);
             self.expr.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprInPlace {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.place.to_tokens(tokens);
             self.arrow_token.to_tokens(tokens);
             self.value.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprArray {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.bracket_token.surround(tokens, |tokens| {
                 self.elems.to_tokens(tokens);
             })
         }
     }
 
     impl ToTokens for ExprCall {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             attrs_to_tokens(&self.attrs, tokens);
             self.func.to_tokens(tokens);
             self.paren_token.surround(tokens, |tokens| {
                 self.args.to_tokens(tokens);
             })
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprMethodCall {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.receiver.to_tokens(tokens);
             self.dot_token.to_tokens(tokens);
             self.method.to_tokens(tokens);
             self.turbofish.to_tokens(tokens);
             self.paren_token.surround(tokens, |tokens| {
                 self.args.to_tokens(tokens);
             });
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for MethodTurbofish {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.colon2_token.to_tokens(tokens);
             self.lt_token.to_tokens(tokens);
             self.args.to_tokens(tokens);
             self.gt_token.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for GenericMethodArgument {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             match *self {
                 GenericMethodArgument::Type(ref t) => t.to_tokens(tokens),
                 GenericMethodArgument::Const(ref c) => c.to_tokens(tokens),
             }
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprTuple {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.paren_token.surround(tokens, |tokens| {
                 self.elems.to_tokens(tokens);
                 // If we only have one argument, we need a trailing comma to
                 // distinguish ExprTuple from ExprParen.
                 if self.elems.len() == 1 && !self.elems.trailing_punct() {
                     <Token![,]>::default().to_tokens(tokens);
                 }
             })
         }
     }
 
     impl ToTokens for ExprBinary {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             attrs_to_tokens(&self.attrs, tokens);
             self.left.to_tokens(tokens);
             self.op.to_tokens(tokens);
             self.right.to_tokens(tokens);
         }
     }
 
     impl ToTokens for ExprUnary {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             attrs_to_tokens(&self.attrs, tokens);
             self.op.to_tokens(tokens);
             self.expr.to_tokens(tokens);
         }
     }
 
     impl ToTokens for ExprLit {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             attrs_to_tokens(&self.attrs, tokens);
             self.lit.to_tokens(tokens);
         }
     }
 
     impl ToTokens for ExprCast {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             attrs_to_tokens(&self.attrs, tokens);
             self.expr.to_tokens(tokens);
             self.as_token.to_tokens(tokens);
             self.ty.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprType {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             attrs_to_tokens(&self.attrs, tokens);
             self.expr.to_tokens(tokens);
             self.colon_token.to_tokens(tokens);
             self.ty.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
-    fn maybe_wrap_else(tokens: &mut Tokens, else_: &Option<(Token![else], Box<Expr>)>) {
+    fn maybe_wrap_else(tokens: &mut TokenStream, else_: &Option<(Token![else], Box<Expr>)>) {
         if let Some((ref else_token, ref else_)) = *else_ {
             else_token.to_tokens(tokens);
 
             // If we are not one of the valid expressions to exist in an else
             // clause, wrap ourselves in a block.
             match **else_ {
                 Expr::If(_) | Expr::IfLet(_) | Expr::Block(_) => {
                     else_.to_tokens(tokens);
@@ -2990,90 +2993,90 @@ mod printing {
                     });
                 }
             }
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprIf {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.if_token.to_tokens(tokens);
             wrap_bare_struct(tokens, &self.cond);
             self.then_branch.to_tokens(tokens);
             maybe_wrap_else(tokens, &self.else_branch);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprIfLet {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.if_token.to_tokens(tokens);
             self.let_token.to_tokens(tokens);
             self.pats.to_tokens(tokens);
             self.eq_token.to_tokens(tokens);
             wrap_bare_struct(tokens, &self.expr);
             self.then_branch.to_tokens(tokens);
             maybe_wrap_else(tokens, &self.else_branch);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprWhile {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.label.to_tokens(tokens);
             self.while_token.to_tokens(tokens);
             wrap_bare_struct(tokens, &self.cond);
             self.body.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprWhileLet {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.label.to_tokens(tokens);
             self.while_token.to_tokens(tokens);
             self.let_token.to_tokens(tokens);
             self.pats.to_tokens(tokens);
             self.eq_token.to_tokens(tokens);
             wrap_bare_struct(tokens, &self.expr);
             self.body.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprForLoop {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.label.to_tokens(tokens);
             self.for_token.to_tokens(tokens);
             self.pat.to_tokens(tokens);
             self.in_token.to_tokens(tokens);
             wrap_bare_struct(tokens, &self.expr);
             self.body.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprLoop {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.label.to_tokens(tokens);
             self.loop_token.to_tokens(tokens);
             self.body.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprMatch {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.match_token.to_tokens(tokens);
             wrap_bare_struct(tokens, &self.expr);
             self.brace_token.surround(tokens, |tokens| {
                 for (i, arm) in self.arms.iter().enumerate() {
                     arm.to_tokens(tokens);
                     // Ensure that we have a comma after a non-block arm, except
                     // for the last one.
@@ -3083,36 +3086,36 @@ mod printing {
                     }
                 }
             });
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprCatch {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.do_token.to_tokens(tokens);
             self.catch_token.to_tokens(tokens);
             self.block.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprYield {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.yield_token.to_tokens(tokens);
             self.expr.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprClosure {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.movability.to_tokens(tokens);
             self.capture.to_tokens(tokens);
             self.or1_token.to_tokens(tokens);
             for input in self.inputs.pairs() {
                 match **input.value() {
                     FnArg::Captured(ArgCaptured {
                         ref pat,
@@ -3128,304 +3131,304 @@ mod printing {
             self.or2_token.to_tokens(tokens);
             self.output.to_tokens(tokens);
             self.body.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprUnsafe {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.unsafe_token.to_tokens(tokens);
             self.block.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprBlock {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.block.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprAssign {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.left.to_tokens(tokens);
             self.eq_token.to_tokens(tokens);
             self.right.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprAssignOp {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.left.to_tokens(tokens);
             self.op.to_tokens(tokens);
             self.right.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprField {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.base.to_tokens(tokens);
             self.dot_token.to_tokens(tokens);
             self.member.to_tokens(tokens);
         }
     }
 
     impl ToTokens for Member {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             match *self {
-                Member::Named(ident) => ident.to_tokens(tokens),
+                Member::Named(ref ident) => ident.to_tokens(tokens),
                 Member::Unnamed(ref index) => index.to_tokens(tokens),
             }
         }
     }
 
     impl ToTokens for Index {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             let mut lit = Literal::i64_unsuffixed(i64::from(self.index));
             lit.set_span(self.span);
             tokens.append(lit);
         }
     }
 
     impl ToTokens for ExprIndex {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             attrs_to_tokens(&self.attrs, tokens);
             self.expr.to_tokens(tokens);
             self.bracket_token.surround(tokens, |tokens| {
                 self.index.to_tokens(tokens);
             });
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprRange {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.from.to_tokens(tokens);
             match self.limits {
                 RangeLimits::HalfOpen(ref t) => t.to_tokens(tokens),
                 RangeLimits::Closed(ref t) => t.to_tokens(tokens),
             }
             self.to.to_tokens(tokens);
         }
     }
 
     impl ToTokens for ExprPath {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             attrs_to_tokens(&self.attrs, tokens);
             ::PathTokens(&self.qself, &self.path).to_tokens(tokens)
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprReference {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.and_token.to_tokens(tokens);
             self.mutability.to_tokens(tokens);
             self.expr.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprBreak {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.break_token.to_tokens(tokens);
             self.label.to_tokens(tokens);
             self.expr.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprContinue {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.continue_token.to_tokens(tokens);
             self.label.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprReturn {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.return_token.to_tokens(tokens);
             self.expr.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprMacro {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.mac.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprStruct {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.path.to_tokens(tokens);
             self.brace_token.surround(tokens, |tokens| {
                 self.fields.to_tokens(tokens);
                 if self.rest.is_some() {
                     TokensOrDefault(&self.dot2_token).to_tokens(tokens);
                     self.rest.to_tokens(tokens);
                 }
             })
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprRepeat {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.bracket_token.surround(tokens, |tokens| {
                 self.expr.to_tokens(tokens);
                 self.semi_token.to_tokens(tokens);
                 self.len.to_tokens(tokens);
             })
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprGroup {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             attrs_to_tokens(&self.attrs, tokens);
             self.group_token.surround(tokens, |tokens| {
                 self.expr.to_tokens(tokens);
             });
         }
     }
 
     impl ToTokens for ExprParen {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             attrs_to_tokens(&self.attrs, tokens);
             self.paren_token.surround(tokens, |tokens| {
                 self.expr.to_tokens(tokens);
             });
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for ExprTry {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.expr.to_tokens(tokens);
             self.question_token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for ExprVerbatim {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.tts.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for Label {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.name.to_tokens(tokens);
             self.colon_token.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for FieldValue {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.member.to_tokens(tokens);
             if let Some(ref colon_token) = self.colon_token {
                 colon_token.to_tokens(tokens);
                 self.expr.to_tokens(tokens);
             }
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for Arm {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(&self.attrs);
             self.leading_vert.to_tokens(tokens);
             self.pats.to_tokens(tokens);
             if let Some((ref if_token, ref guard)) = self.guard {
                 if_token.to_tokens(tokens);
                 guard.to_tokens(tokens);
             }
             self.fat_arrow_token.to_tokens(tokens);
             self.body.to_tokens(tokens);
             self.comma.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for PatWild {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.underscore_token.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for PatIdent {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.by_ref.to_tokens(tokens);
             self.mutability.to_tokens(tokens);
             self.ident.to_tokens(tokens);
             if let Some((ref at_token, ref subpat)) = self.subpat {
                 at_token.to_tokens(tokens);
                 subpat.to_tokens(tokens);
             }
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for PatStruct {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.path.to_tokens(tokens);
             self.brace_token.surround(tokens, |tokens| {
                 self.fields.to_tokens(tokens);
                 // NOTE: We need a comma before the dot2 token if it is present.
                 if !self.fields.empty_or_trailing() && self.dot2_token.is_some() {
                     <Token![,]>::default().to_tokens(tokens);
                 }
                 self.dot2_token.to_tokens(tokens);
             });
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for PatTupleStruct {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.path.to_tokens(tokens);
             self.pat.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for PatPath {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             ::PathTokens(&self.qself, &self.path).to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for PatTuple {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.paren_token.surround(tokens, |tokens| {
                 self.front.to_tokens(tokens);
                 if let Some(ref dot2_token) = self.dot2_token {
                     if !self.front.empty_or_trailing() {
                         // Ensure there is a comma before the .. token.
                         <Token![,]>::default().to_tokens(tokens);
                     }
                     dot2_token.to_tokens(tokens);
@@ -3437,53 +3440,53 @@ mod printing {
                 }
                 self.back.to_tokens(tokens);
             });
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for PatBox {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.box_token.to_tokens(tokens);
             self.pat.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for PatRef {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.and_token.to_tokens(tokens);
             self.mutability.to_tokens(tokens);
             self.pat.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for PatLit {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.expr.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for PatRange {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.lo.to_tokens(tokens);
             match self.limits {
                 RangeLimits::HalfOpen(ref t) => t.to_tokens(tokens),
                 RangeLimits::Closed(ref t) => Token![...](t.0).to_tokens(tokens),
             }
             self.hi.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for PatSlice {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             // XXX: This is a mess, and it will be so easy to screw it up. How
             // do we make this correct itself better?
             self.bracket_token.surround(tokens, |tokens| {
                 self.front.to_tokens(tokens);
 
                 // If we need a comma before the middle or standalone .. token,
                 // then make sure it's present.
                 if !self.front.empty_or_trailing()
@@ -3508,66 +3511,66 @@ mod printing {
                     self.comma_token.to_tokens(tokens);
                 }
             })
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for PatMacro {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.mac.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for PatVerbatim {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.tts.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for FieldPat {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             if let Some(ref colon_token) = self.colon_token {
                 self.member.to_tokens(tokens);
                 colon_token.to_tokens(tokens);
             }
             self.pat.to_tokens(tokens);
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for Block {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.brace_token.surround(tokens, |tokens| {
                 tokens.append_all(&self.stmts);
             });
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for Stmt {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             match *self {
                 Stmt::Local(ref local) => local.to_tokens(tokens),
                 Stmt::Item(ref item) => item.to_tokens(tokens),
                 Stmt::Expr(ref expr) => expr.to_tokens(tokens),
                 Stmt::Semi(ref expr, ref semi) => {
                     expr.to_tokens(tokens);
                     semi.to_tokens(tokens);
                 }
             }
         }
     }
 
     #[cfg(feature = "full")]
     impl ToTokens for Local {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.let_token.to_tokens(tokens);
             self.pats.to_tokens(tokens);
             if let Some((ref colon_token, ref ty)) = self.ty {
                 colon_token.to_tokens(tokens);
                 ty.to_tokens(tokens);
             }
             if let Some((ref eq_token, ref init)) = self.init {
--- a/third_party/rust/syn/src/file.rs
+++ b/third_party/rust/syn/src/file.rs
@@ -106,17 +106,18 @@ pub mod parsing {
         }
     }
 }
 
 #[cfg(feature = "printing")]
 mod printing {
     use super::*;
     use attr::FilterAttrs;
-    use quote::{ToTokens, Tokens};
+    use proc_macro2::TokenStream;
+    use quote::{ToTokens, TokenStreamExt};
 
     impl ToTokens for File {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.inner());
             tokens.append_all(&self.items);
         }
     }
 }
--- a/third_party/rust/syn/src/gen/fold.rs
+++ b/third_party/rust/syn/src/gen/fold.rs
@@ -415,29 +415,27 @@ fn fold_vis_restricted(&mut self, i: Vis
 fn fold_visibility(&mut self, i: Visibility) -> Visibility { fold_visibility(self, i) }
 # [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
 fn fold_where_clause(&mut self, i: WhereClause) -> WhereClause { fold_where_clause(self, i) }
 # [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
 fn fold_where_predicate(&mut self, i: WherePredicate) -> WherePredicate { fold_where_predicate(self, i) }
 
 }
 
+#[cfg(any(feature = "full", feature = "derive"))]
 macro_rules! fold_span_only {
     ($f:ident : $t:ident) => {
         pub fn $f<V: Fold + ?Sized>(_visitor: &mut V, mut _i: $t) -> $t {
             let span = _visitor.fold_span(_i.span());
             _i.set_span(span);
             _i
         }
     }
 }
 
-fold_span_only!(fold_ident: Ident);
-#[cfg(any(feature = "full", feature = "derive"))]
-fold_span_only!(fold_lifetime: Lifetime);
 #[cfg(any(feature = "full", feature = "derive"))]
 fold_span_only!(fold_lit_byte: LitByte);
 #[cfg(any(feature = "full", feature = "derive"))]
 fold_span_only!(fold_lit_byte_str: LitByteStr);
 #[cfg(any(feature = "full", feature = "derive"))]
 fold_span_only!(fold_lit_char: LitChar);
 #[cfg(any(feature = "full", feature = "derive"))]
 fold_span_only!(fold_lit_float: LitFloat);
@@ -1591,16 +1589,23 @@ pub fn fold_generic_param<V: Fold + ?Siz
 pub fn fold_generics<V: Fold + ?Sized>(_visitor: &mut V, _i: Generics) -> Generics {
     Generics {
         lt_token: (_i . lt_token).map(|it| { Token ! [ < ](tokens_helper(_visitor, &(it).0)) }),
         params: FoldHelper::lift(_i . params, |it| { _visitor.fold_generic_param(it) }),
         gt_token: (_i . gt_token).map(|it| { Token ! [ > ](tokens_helper(_visitor, &(it).0)) }),
         where_clause: (_i . where_clause).map(|it| { _visitor.fold_where_clause(it) }),
     }
 }
+
+pub fn fold_ident<V: Fold + ?Sized>(_visitor: &mut V, _i: Ident) -> Ident {
+    let mut _i = _i;
+    let span = _visitor.fold_span(_i.span());
+    _i.set_span(span);
+    _i
+}
 # [ cfg ( feature = "full" ) ]
 pub fn fold_impl_item<V: Fold + ?Sized>(_visitor: &mut V, _i: ImplItem) -> ImplItem {
     match _i {
         ImplItem::Const(_binding_0, ) => {
             ImplItem::Const (
                 _visitor.fold_impl_item_const(_binding_0),
             )
         }
@@ -1975,16 +1980,23 @@ pub fn fold_item_verbatim<V: Fold + ?Siz
 # [ cfg ( any ( feature = "full" , feature = "derive" ) ) ] # [ cfg ( feature = "full" ) ]
 pub fn fold_label<V: Fold + ?Sized>(_visitor: &mut V, _i: Label) -> Label {
     Label {
         name: _visitor.fold_lifetime(_i . name),
         colon_token: Token ! [ : ](tokens_helper(_visitor, &(_i . colon_token).0)),
     }
 }
 # [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
+pub fn fold_lifetime<V: Fold + ?Sized>(_visitor: &mut V, _i: Lifetime) -> Lifetime {
+    Lifetime {
+        apostrophe: _i . apostrophe,
+        ident: _visitor.fold_ident(_i . ident),
+    }
+}
+# [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
 pub fn fold_lifetime_def<V: Fold + ?Sized>(_visitor: &mut V, _i: LifetimeDef) -> LifetimeDef {
     LifetimeDef {
         attrs: FoldHelper::lift(_i . attrs, |it| { _visitor.fold_attribute(it) }),
         lifetime: _visitor.fold_lifetime(_i . lifetime),
         colon_token: (_i . colon_token).map(|it| { Token ! [ : ](tokens_helper(_visitor, &(it).0)) }),
         bounds: FoldHelper::lift(_i . bounds, |it| { _visitor.fold_lifetime(it) }),
     }
 }
--- a/third_party/rust/syn/src/gen/visit.rs
+++ b/third_party/rust/syn/src/gen/visit.rs
@@ -1241,17 +1241,16 @@ pub fn visit_generic_param<'ast, V: Visi
 pub fn visit_generics<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast Generics) {
     if let Some(ref it) = _i . lt_token { tokens_helper(_visitor, &(it).0) };
     for el in Punctuated::pairs(& _i . params) { let it = el.value(); _visitor.visit_generic_param(it) };
     if let Some(ref it) = _i . gt_token { tokens_helper(_visitor, &(it).0) };
     if let Some(ref it) = _i . where_clause { _visitor.visit_where_clause(it) };
 }
 
 pub fn visit_ident<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast Ident) {
-    // Skipped field _i . term;
 }
 # [ cfg ( feature = "full" ) ]
 pub fn visit_impl_item<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast ImplItem) {
     match *_i {
         ImplItem::Const(ref _binding_0, ) => {
             _visitor.visit_impl_item_const(_binding_0);
         }
         ImplItem::Method(ref _binding_0, ) => {
@@ -1540,17 +1539,18 @@ pub fn visit_item_verbatim<'ast, V: Visi
 }
 # [ cfg ( any ( feature = "full" , feature = "derive" ) ) ] # [ cfg ( feature = "full" ) ]
 pub fn visit_label<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast Label) {
     _visitor.visit_lifetime(& _i . name);
     tokens_helper(_visitor, &(& _i . colon_token).0);
 }
 # [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
 pub fn visit_lifetime<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast Lifetime) {
-    // Skipped field _i . term;
+    // Skipped field _i . apostrophe;
+    _visitor.visit_ident(& _i . ident);
 }
 # [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
 pub fn visit_lifetime_def<'ast, V: Visit<'ast> + ?Sized>(_visitor: &mut V, _i: &'ast LifetimeDef) {
     for it in & _i . attrs { _visitor.visit_attribute(it) };
     _visitor.visit_lifetime(& _i . lifetime);
     if let Some(ref it) = _i . colon_token { tokens_helper(_visitor, &(it).0) };
     for el in Punctuated::pairs(& _i . bounds) { let it = el.value(); _visitor.visit_lifetime(it) };
 }
--- a/third_party/rust/syn/src/gen/visit_mut.rs
+++ b/third_party/rust/syn/src/gen/visit_mut.rs
@@ -1242,17 +1242,16 @@ pub fn visit_generic_param_mut<V: VisitM
 pub fn visit_generics_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut Generics) {
     if let Some(ref mut it) = _i . lt_token { tokens_helper(_visitor, &mut (it).0) };
     for mut el in Punctuated::pairs_mut(& mut _i . params) { let it = el.value_mut(); _visitor.visit_generic_param_mut(it) };
     if let Some(ref mut it) = _i . gt_token { tokens_helper(_visitor, &mut (it).0) };
     if let Some(ref mut it) = _i . where_clause { _visitor.visit_where_clause_mut(it) };
 }
 
 pub fn visit_ident_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut Ident) {
-    // Skipped field _i . term;
 }
 # [ cfg ( feature = "full" ) ]
 pub fn visit_impl_item_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut ImplItem) {
     match *_i {
         ImplItem::Const(ref mut _binding_0, ) => {
             _visitor.visit_impl_item_const_mut(_binding_0);
         }
         ImplItem::Method(ref mut _binding_0, ) => {
@@ -1541,17 +1540,18 @@ pub fn visit_item_verbatim_mut<V: VisitM
 }
 # [ cfg ( any ( feature = "full" , feature = "derive" ) ) ] # [ cfg ( feature = "full" ) ]
 pub fn visit_label_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut Label) {
     _visitor.visit_lifetime_mut(& mut _i . name);
     tokens_helper(_visitor, &mut (& mut _i . colon_token).0);
 }
 # [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
 pub fn visit_lifetime_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut Lifetime) {
-    // Skipped field _i . term;
+    // Skipped field _i . apostrophe;
+    _visitor.visit_ident_mut(& mut _i . ident);
 }
 # [ cfg ( any ( feature = "full" , feature = "derive" ) ) ]
 pub fn visit_lifetime_def_mut<V: VisitMut + ?Sized>(_visitor: &mut V, _i: &mut LifetimeDef) {
     for it in & mut _i . attrs { _visitor.visit_attribute_mut(it) };
     _visitor.visit_lifetime_mut(& mut _i . lifetime);
     if let Some(ref mut it) = _i . colon_token { tokens_helper(_visitor, &mut (it).0) };
     for mut el in Punctuated::pairs_mut(& mut _i . bounds) { let it = el.value_mut(); _visitor.visit_lifetime_mut(it) };
 }
--- a/third_party/rust/syn/src/gen_helper.rs
+++ b/third_party/rust/syn/src/gen_helper.rs
@@ -3,19 +3,19 @@
 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
 #[cfg(feature = "fold")]
 pub mod fold {
-    use punctuated::{Pair, Punctuated};
     use fold::Fold;
     use proc_macro2::Span;
+    use punctuated::{Pair, Punctuated};
 
     pub trait FoldHelper {
         type Item;
         fn lift<F>(self, f: F) -> Self
         where
             F: FnMut(Self::Item) -> Self::Item;
     }
 
@@ -79,20 +79,17 @@ pub mod fold {
     }
 }
 
 #[cfg(feature = "visit")]
 pub mod visit {
     use proc_macro2::Span;
     use visit::Visit;
 
-    pub fn tokens_helper<'ast, V: Visit<'ast> + ?Sized, S: Spans>(
-        visitor: &mut V,
-        spans: &'ast S,
-    ) {
+    pub fn tokens_helper<'ast, V: Visit<'ast> + ?Sized, S: Spans>(visitor: &mut V, spans: &'ast S) {
         spans.visit(visitor);
     }
 
     pub trait Spans {
         fn visit<'ast, V: Visit<'ast> + ?Sized>(&'ast self, visitor: &mut V);
     }
 
     impl Spans for Span {
--- a/third_party/rust/syn/src/generics.rs
+++ b/third_party/rust/syn/src/generics.rs
@@ -302,22 +302,24 @@ pub struct TypeGenerics<'a>(&'a Generics
 pub struct Turbofish<'a>(&'a Generics);
 
 #[cfg(feature = "printing")]
 impl Generics {
     /// Split a type's generics into the pieces required for impl'ing a trait
     /// for that type.
     ///
     /// ```
+    /// # extern crate proc_macro2;
     /// # extern crate syn;
     /// # #[macro_use]
     /// # extern crate quote;
+    /// # use proc_macro2::{Span, Ident};
     /// # fn main() {
     /// # let generics: syn::Generics = Default::default();
-    /// # let name = syn::Ident::from("MyType");
+    /// # let name = Ident::new("MyType", Span::call_site());
     /// let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
     /// quote! {
     ///     impl #impl_generics MyTrait for #name #ty_generics #where_clause {
     ///         // ...
     ///     }
     /// }
     /// # ;
     /// # }
@@ -481,18 +483,18 @@ ast_enum_of_structs! {
         }),
     }
 }
 
 #[cfg(feature = "parsing")]
 pub mod parsing {
     use super::*;
 
+    use punctuated::Pair;
     use synom::Synom;
-    use punctuated::Pair;
 
     impl Synom for Generics {
         named!(parse -> Self, map!(
             alt!(
                 do_parse!(
                     lt: punct!(<) >>
                     lifetimes: call!(Punctuated::<LifetimeDef, Token![,]>::parse_terminated) >>
                     ty_params: cond!(
@@ -746,20 +748,21 @@ pub mod parsing {
         }
     }
 }
 
 #[cfg(feature = "printing")]
 mod printing {
     use super::*;
     use attr::FilterAttrs;
-    use quote::{ToTokens, Tokens};
+    use proc_macro2::TokenStream;
+    use quote::{ToTokens, TokenStreamExt};
 
     impl ToTokens for Generics {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             if self.params.is_empty() {
                 return;
             }
 
             TokensOrDefault(&self.lt_token).to_tokens(tokens);
 
             // Print lifetimes before types and consts, regardless of their
             // order in self.params.
@@ -786,17 +789,17 @@ mod printing {
                 }
             }
 
             TokensOrDefault(&self.gt_token).to_tokens(tokens);
         }
     }
 
     impl<'a> ToTokens for ImplGenerics<'a> {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             if self.0.params.is_empty() {
                 return;
             }
 
             TokensOrDefault(&self.0.lt_token).to_tokens(tokens);
 
             // Print lifetimes before types and consts, regardless of their
             // order in self.params.
@@ -841,17 +844,17 @@ mod printing {
                 param.punct().to_tokens(tokens);
             }
 
             TokensOrDefault(&self.0.gt_token).to_tokens(tokens);
         }
     }
 
     impl<'a> ToTokens for TypeGenerics<'a> {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             if self.0.params.is_empty() {
                 return;
             }
 
             TokensOrDefault(&self.0.lt_token).to_tokens(tokens);
 
             // Print lifetimes before types and consts, regardless of their
             // order in self.params.
@@ -889,124 +892,124 @@ mod printing {
                 param.punct().to_tokens(tokens);
             }
 
             TokensOrDefault(&self.0.gt_token).to_tokens(tokens);
         }
     }
 
     impl<'a> ToTokens for Turbofish<'a> {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             if !self.0.params.is_empty() {
                 <Token![::]>::default().to_tokens(tokens);
                 TypeGenerics(self.0).to_tokens(tokens);
             }
         }
     }
 
     impl ToTokens for BoundLifetimes {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.for_token.to_tokens(tokens);
             self.lt_token.to_tokens(tokens);
             self.lifetimes.to_tokens(tokens);
             self.gt_token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for LifetimeDef {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.lifetime.to_tokens(tokens);
             if !self.bounds.is_empty() {
                 TokensOrDefault(&self.colon_token).to_tokens(tokens);
                 self.bounds.to_tokens(tokens);
             }
         }
     }
 
     impl ToTokens for TypeParam {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.ident.to_tokens(tokens);
             if !self.bounds.is_empty() {
                 TokensOrDefault(&self.colon_token).to_tokens(tokens);
                 self.bounds.to_tokens(tokens);
             }
             if self.default.is_some() {
                 TokensOrDefault(&self.eq_token).to_tokens(tokens);
                 self.default.to_tokens(tokens);
             }
         }
     }
 
     impl ToTokens for TraitBound {
-        fn to_tokens(&self, tokens: &mut Tokens) {
-            let to_tokens = |tokens: &mut Tokens| {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
+            let to_tokens = |tokens: &mut TokenStream| {
                 self.modifier.to_tokens(tokens);
                 self.lifetimes.to_tokens(tokens);
                 self.path.to_tokens(tokens);
             };
             match self.paren_token {
                 Some(ref paren) => paren.surround(tokens, to_tokens),
                 None => to_tokens(tokens),
             }
         }
     }
 
     impl ToTokens for TraitBoundModifier {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             match *self {
                 TraitBoundModifier::None => {}
                 TraitBoundModifier::Maybe(ref t) => t.to_tokens(tokens),
             }
         }
     }
 
     impl ToTokens for ConstParam {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.const_token.to_tokens(tokens);
             self.ident.to_tokens(tokens);
             self.colon_token.to_tokens(tokens);
             self.ty.to_tokens(tokens);
             if self.default.is_some() {
                 TokensOrDefault(&self.eq_token).to_tokens(tokens);
                 self.default.to_tokens(tokens);
             }
         }
     }
 
     impl ToTokens for WhereClause {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             if !self.predicates.is_empty() {
                 self.where_token.to_tokens(tokens);
                 self.predicates.to_tokens(tokens);
             }
         }
     }
 
     impl ToTokens for PredicateType {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.lifetimes.to_tokens(tokens);
             self.bounded_ty.to_tokens(tokens);
             self.colon_token.to_tokens(tokens);
             self.bounds.to_tokens(tokens);
         }
     }
 
     impl ToTokens for PredicateLifetime {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.lifetime.to_tokens(tokens);
             if !self.bounds.is_empty() {
                 TokensOrDefault(&self.colon_token).to_tokens(tokens);
                 self.bounds.to_tokens(tokens);
             }
         }
     }
 
     impl ToTokens for PredicateEq {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.lhs_ty.to_tokens(tokens);
             self.eq_token.to_tokens(tokens);
             self.rhs_ty.to_tokens(tokens);
         }
     }
 }
--- a/third_party/rust/syn/src/item.rs
+++ b/third_party/rust/syn/src/item.rs
@@ -3,24 +3,24 @@
 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
 use super::*;
 use derive::{Data, DeriveInput};
+use proc_macro2::TokenStream;
 use punctuated::Punctuated;
-use proc_macro2::TokenStream;
 use token::{Brace, Paren};
 
 #[cfg(feature = "extra-traits")]
-use tt::TokenStreamHelper;
+use std::hash::{Hash, Hasher};
 #[cfg(feature = "extra-traits")]
-use std::hash::{Hash, Hasher};
+use tt::TokenStreamHelper;
 
 ast_enum_of_structs! {
     /// Things that can appear directly inside of a module or scope.
     ///
     /// *This type is available if Syn is built with the `"full"` feature.*
     ///
     /// # Syntax tree enum
     ///
@@ -243,18 +243,21 @@ ast_enum_of_structs! {
 }
 
 #[cfg(feature = "extra-traits")]
 impl Eq for ItemMacro2 {}
 
 #[cfg(feature = "extra-traits")]
 impl PartialEq for ItemMacro2 {
     fn eq(&self, other: &Self) -> bool {
-        self.attrs == other.attrs && self.vis == other.vis && self.macro_token == other.macro_token
-            && self.ident == other.ident && self.paren_token == other.paren_token
+        self.attrs == other.attrs
+            && self.vis == other.vis
+            && self.macro_token == other.macro_token
+            && self.ident == other.ident
+            && self.paren_token == other.paren_token
             && TokenStreamHelper(&self.args) == TokenStreamHelper(&other.args)
             && self.brace_token == other.brace_token
             && TokenStreamHelper(&self.body) == TokenStreamHelper(&other.body)
     }
 }
 
 #[cfg(feature = "extra-traits")]
 impl Hash for ItemMacro2 {
@@ -1531,145 +1534,146 @@ pub mod parsing {
         }
     }
 }
 
 #[cfg(feature = "printing")]
 mod printing {
     use super::*;
     use attr::FilterAttrs;
-    use quote::{ToTokens, Tokens};
+    use proc_macro2::TokenStream;
+    use quote::{ToTokens, TokenStreamExt};
 
     impl ToTokens for ItemExternCrate {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.vis.to_tokens(tokens);
             self.extern_token.to_tokens(tokens);
             self.crate_token.to_tokens(tokens);
             self.ident.to_tokens(tokens);
             if let Some((ref as_token, ref rename)) = self.rename {
                 as_token.to_tokens(tokens);
                 rename.to_tokens(tokens);
             }
             self.semi_token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for ItemUse {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.vis.to_tokens(tokens);
             self.use_token.to_tokens(tokens);
             self.leading_colon.to_tokens(tokens);
             self.tree.to_tokens(tokens);
             self.semi_token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for ItemStatic {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.vis.to_tokens(tokens);
             self.static_token.to_tokens(tokens);
             self.mutability.to_tokens(tokens);
             self.ident.to_tokens(tokens);
             self.colon_token.to_tokens(tokens);
             self.ty.to_tokens(tokens);
             self.eq_token.to_tokens(tokens);
             self.expr.to_tokens(tokens);
             self.semi_token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for ItemConst {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.vis.to_tokens(tokens);
             self.const_token.to_tokens(tokens);
             self.ident.to_tokens(tokens);
             self.colon_token.to_tokens(tokens);
             self.ty.to_tokens(tokens);
             self.eq_token.to_tokens(tokens);
             self.expr.to_tokens(tokens);
             self.semi_token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for ItemFn {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.vis.to_tokens(tokens);
             self.constness.to_tokens(tokens);
             self.unsafety.to_tokens(tokens);
             self.abi.to_tokens(tokens);
-            NamedDecl(&self.decl, self.ident).to_tokens(tokens);
+            NamedDecl(&self.decl, &self.ident).to_tokens(tokens);
             self.block.brace_token.surround(tokens, |tokens| {
                 tokens.append_all(self.attrs.inner());
                 tokens.append_all(&self.block.stmts);
             });
         }
     }
 
     impl ToTokens for ItemMod {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.vis.to_tokens(tokens);
             self.mod_token.to_tokens(tokens);
             self.ident.to_tokens(tokens);
             if let Some((ref brace, ref items)) = self.content {
                 brace.surround(tokens, |tokens| {
                     tokens.append_all(self.attrs.inner());
                     tokens.append_all(items);
                 });
             } else {
                 TokensOrDefault(&self.semi).to_tokens(tokens);
             }
         }
     }
 
     impl ToTokens for ItemForeignMod {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.abi.to_tokens(tokens);
             self.brace_token.surround(tokens, |tokens| {
                 tokens.append_all(&self.items);
             });
         }
     }
 
     impl ToTokens for ItemType {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.vis.to_tokens(tokens);
             self.type_token.to_tokens(tokens);
             self.ident.to_tokens(tokens);
             self.generics.to_tokens(tokens);
             self.generics.where_clause.to_tokens(tokens);
             self.eq_token.to_tokens(tokens);
             self.ty.to_tokens(tokens);
             self.semi_token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for ItemEnum {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.vis.to_tokens(tokens);
             self.enum_token.to_tokens(tokens);
             self.ident.to_tokens(tokens);
             self.generics.to_tokens(tokens);
             self.generics.where_clause.to_tokens(tokens);
             self.brace_token.surround(tokens, |tokens| {
                 self.variants.to_tokens(tokens);
             });
         }
     }
 
     impl ToTokens for ItemStruct {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.vis.to_tokens(tokens);
             self.struct_token.to_tokens(tokens);
             self.ident.to_tokens(tokens);
             self.generics.to_tokens(tokens);
             match self.fields {
                 Fields::Named(ref fields) => {
                     self.generics.where_clause.to_tokens(tokens);
@@ -1684,29 +1688,29 @@ mod printing {
                     self.generics.where_clause.to_tokens(tokens);
                     TokensOrDefault(&self.semi_token).to_tokens(tokens);
                 }
             }
         }
     }
 
     impl ToTokens for ItemUnion {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.vis.to_tokens(tokens);
             self.union_token.to_tokens(tokens);
             self.ident.to_tokens(tokens);
             self.generics.to_tokens(tokens);
             self.generics.where_clause.to_tokens(tokens);
             self.fields.to_tokens(tokens);
         }
     }
 
     impl ToTokens for ItemTrait {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.vis.to_tokens(tokens);
             self.unsafety.to_tokens(tokens);
             self.auto_token.to_tokens(tokens);
             self.trait_token.to_tokens(tokens);
             self.ident.to_tokens(tokens);
             self.generics.to_tokens(tokens);
             if !self.supertraits.is_empty() {
@@ -1716,17 +1720,17 @@ mod printing {
             self.generics.where_clause.to_tokens(tokens);
             self.brace_token.surround(tokens, |tokens| {
                 tokens.append_all(&self.items);
             });
         }
     }
 
     impl ToTokens for ItemImpl {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.defaultness.to_tokens(tokens);
             self.unsafety.to_tokens(tokens);
             self.impl_token.to_tokens(tokens);
             self.generics.to_tokens(tokens);
             if let Some((ref polarity, ref path, ref for_token)) = self.trait_ {
                 polarity.to_tokens(tokens);
                 path.to_tokens(tokens);
@@ -1737,17 +1741,17 @@ mod printing {
             self.brace_token.surround(tokens, |tokens| {
                 tokens.append_all(self.attrs.inner());
                 tokens.append_all(&self.items);
             });
         }
     }
 
     impl ToTokens for ItemMacro {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.mac.path.to_tokens(tokens);
             self.mac.bang_token.to_tokens(tokens);
             self.ident.to_tokens(tokens);
             match self.mac.delimiter {
                 MacroDelimiter::Paren(ref paren) => {
                     paren.surround(tokens, |tokens| self.mac.tts.to_tokens(tokens));
                 }
@@ -1758,89 +1762,89 @@ mod printing {
                     bracket.surround(tokens, |tokens| self.mac.tts.to_tokens(tokens));
                 }
             }
             self.semi_token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for ItemMacro2 {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.vis.to_tokens(tokens);
             self.macro_token.to_tokens(tokens);
             self.ident.to_tokens(tokens);
             self.paren_token.surround(tokens, |tokens| {
                 self.args.to_tokens(tokens);
             });
             self.brace_token.surround(tokens, |tokens| {
                 self.body.to_tokens(tokens);
             });
         }
     }
 
     impl ToTokens for ItemVerbatim {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.tts.to_tokens(tokens);
         }
     }
 
     impl ToTokens for UsePath {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.ident.to_tokens(tokens);
             self.colon2_token.to_tokens(tokens);
             self.tree.to_tokens(tokens);
         }
     }
 
     impl ToTokens for UseName {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.ident.to_tokens(tokens);
         }
     }
 
     impl ToTokens for UseRename {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.ident.to_tokens(tokens);
             self.as_token.to_tokens(tokens);
             self.rename.to_tokens(tokens);
         }
     }
 
     impl ToTokens for UseGlob {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.star_token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for UseGroup {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.brace_token.surround(tokens, |tokens| {
                 self.items.to_tokens(tokens);
             });
         }
     }
 
     impl ToTokens for TraitItemConst {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.const_token.to_tokens(tokens);
             self.ident.to_tokens(tokens);
             self.colon_token.to_tokens(tokens);
             self.ty.to_tokens(tokens);
             if let Some((ref eq_token, ref default)) = self.default {
                 eq_token.to_tokens(tokens);
                 default.to_tokens(tokens);
             }
             self.semi_token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for TraitItemMethod {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.sig.to_tokens(tokens);
             match self.default {
                 Some(ref block) => {
                     block.brace_token.surround(tokens, |tokens| {
                         tokens.append_all(self.attrs.inner());
                         tokens.append_all(&block.stmts);
                     });
@@ -1848,17 +1852,17 @@ mod printing {
                 None => {
                     TokensOrDefault(&self.semi_token).to_tokens(tokens);
                 }
             }
         }
     }
 
     impl ToTokens for TraitItemType {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.type_token.to_tokens(tokens);
             self.ident.to_tokens(tokens);
             self.generics.to_tokens(tokens);
             if !self.bounds.is_empty() {
                 TokensOrDefault(&self.colon_token).to_tokens(tokens);
                 self.bounds.to_tokens(tokens);
             }
@@ -1867,167 +1871,167 @@ mod printing {
                 eq_token.to_tokens(tokens);
                 default.to_tokens(tokens);
             }
             self.semi_token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for TraitItemMacro {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.mac.to_tokens(tokens);
             self.semi_token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for TraitItemVerbatim {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.tts.to_tokens(tokens);
         }
     }
 
     impl ToTokens for ImplItemConst {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.vis.to_tokens(tokens);
             self.defaultness.to_tokens(tokens);
             self.const_token.to_tokens(tokens);
             self.ident.to_tokens(tokens);
             self.colon_token.to_tokens(tokens);
             self.ty.to_tokens(tokens);
             self.eq_token.to_tokens(tokens);
             self.expr.to_tokens(tokens);
             self.semi_token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for ImplItemMethod {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.vis.to_tokens(tokens);
             self.defaultness.to_tokens(tokens);
             self.sig.to_tokens(tokens);
             self.block.brace_token.surround(tokens, |tokens| {
                 tokens.append_all(self.attrs.inner());
                 tokens.append_all(&self.block.stmts);
             });
         }
     }
 
     impl ToTokens for ImplItemType {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.vis.to_tokens(tokens);
             self.defaultness.to_tokens(tokens);
             self.type_token.to_tokens(tokens);
             self.ident.to_tokens(tokens);
             self.generics.to_tokens(tokens);
             self.eq_token.to_tokens(tokens);
             self.ty.to_tokens(tokens);
             self.semi_token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for ImplItemMacro {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.mac.to_tokens(tokens);
             self.semi_token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for ImplItemVerbatim {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.tts.to_tokens(tokens);
         }
     }
 
     impl ToTokens for ForeignItemFn {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.vis.to_tokens(tokens);
-            NamedDecl(&self.decl, self.ident).to_tokens(tokens);
+            NamedDecl(&self.decl, &self.ident).to_tokens(tokens);
             self.semi_token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for ForeignItemStatic {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.vis.to_tokens(tokens);
             self.static_token.to_tokens(tokens);
             self.mutability.to_tokens(tokens);
             self.ident.to_tokens(tokens);
             self.colon_token.to_tokens(tokens);
             self.ty.to_tokens(tokens);
             self.semi_token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for ForeignItemType {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.attrs.outer());
             self.vis.to_tokens(tokens);
             self.type_token.to_tokens(tokens);
             self.ident.to_tokens(tokens);
             self.semi_token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for ForeignItemVerbatim {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.tts.to_tokens(tokens);
         }
     }
 
     impl ToTokens for MethodSig {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.constness.to_tokens(tokens);
             self.unsafety.to_tokens(tokens);
             self.abi.to_tokens(tokens);
-            NamedDecl(&self.decl, self.ident).to_tokens(tokens);
+            NamedDecl(&self.decl, &self.ident).to_tokens(tokens);
         }
     }
 
-    struct NamedDecl<'a>(&'a FnDecl, Ident);
+    struct NamedDecl<'a>(&'a FnDecl, &'a Ident);
 
     impl<'a> ToTokens for NamedDecl<'a> {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.0.fn_token.to_tokens(tokens);
             self.1.to_tokens(tokens);
             self.0.generics.to_tokens(tokens);
             self.0.paren_token.surround(tokens, |tokens| {
                 self.0.inputs.to_tokens(tokens);
                 if self.0.variadic.is_some() && !self.0.inputs.empty_or_trailing() {
                     <Token![,]>::default().to_tokens(tokens);
                 }
                 self.0.variadic.to_tokens(tokens);
             });
             self.0.output.to_tokens(tokens);
             self.0.generics.where_clause.to_tokens(tokens);
         }
     }
 
     impl ToTokens for ArgSelfRef {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.and_token.to_tokens(tokens);
             self.lifetime.to_tokens(tokens);
             self.mutability.to_tokens(tokens);
             self.self_token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for ArgSelf {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.mutability.to_tokens(tokens);
             self.self_token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for ArgCaptured {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.pat.to_tokens(tokens);
             self.colon_token.to_tokens(tokens);
             self.ty.to_tokens(tokens);
         }
     }
 }
--- a/third_party/rust/syn/src/lib.rs
+++ b/third_party/rust/syn/src/lib.rs
@@ -63,18 +63,18 @@
 //! get to execute arbitrary Rust code to figure out what to do with those
 //! tokens, then hand some tokens back to the compiler to compile into the
 //! user's crate.
 //!
 //! [`TokenStream`]: https://doc.rust-lang.org/proc_macro/struct.TokenStream.html
 //!
 //! ```toml
 //! [dependencies]
-//! syn = "0.13"
-//! quote = "0.5"
+//! syn = "0.14"
+//! quote = "0.6"
 //!
 //! [lib]
 //! proc-macro = true
 //! ```
 //!
 //! ```rust
 //! extern crate proc_macro;
 //! extern crate syn;
@@ -249,109 +249,134 @@
 //! - **`visit`** — Trait for traversing a syntax tree.
 //! - **`visit-mut`** — Trait for traversing and mutating in place a syntax
 //!   tree.
 //! - **`fold`** — Trait for transforming an owned syntax tree.
 //! - **`clone-impls`** *(enabled by default)* — Clone impls for all syntax tree
 //!   types.
 //! - **`extra-traits`** — Debug, Eq, PartialEq, Hash impls for all syntax tree
 //!   types.
+//! - **`proc-macro`** *(enabled by default)* — Runtime dependency on the
+//!   dynamic library libproc_macro from rustc toolchain.
 
 // Syn types in rustdoc of other crates get linked to here.
-#![doc(html_root_url = "https://docs.rs/syn/0.13.1")]
-#![cfg_attr(feature = "cargo-clippy",
-            allow(const_static_lifetime, doc_markdown, large_enum_variant, match_bool,
-                  redundant_closure, needless_pass_by_value, redundant_field_names))]
+#![doc(html_root_url = "https://docs.rs/syn/0.14.2")]
+#![cfg_attr(feature = "cargo-clippy", deny(clippy, clippy_pedantic))]
+// Ignored clippy lints.
+#![cfg_attr(
+    feature = "cargo-clippy",
+    allow(
+        const_static_lifetime, doc_markdown, large_enum_variant, match_bool, redundant_closure,
+        needless_pass_by_value, redundant_field_names
+    )
+)]
+// Ignored clippy_pedantic lints.
+#![cfg_attr(
+    feature = "cargo-clippy",
+    allow(
+        cast_possible_truncation, cast_possible_wrap, if_not_else, items_after_statements,
+        similar_names, single_match_else, stutter, unseparated_literal_suffix, use_self,
+        used_underscore_binding
+    )
+)]
 
-extern crate proc_macro2;
 #[cfg(feature = "proc-macro")]
 extern crate proc_macro;
+extern crate proc_macro2;
 extern crate unicode_xid;
 
 #[cfg(feature = "printing")]
 extern crate quote;
 
 #[cfg(feature = "parsing")]
 #[macro_use]
 #[doc(hidden)]
 pub mod parsers;
 
 #[macro_use]
 mod macros;
 
 #[macro_use]
 pub mod token;
 
+pub use proc_macro2::Ident;
+
 #[cfg(any(feature = "full", feature = "derive"))]
 mod attr;
 #[cfg(any(feature = "full", feature = "derive"))]
 pub use attr::{AttrStyle, Attribute, Meta, MetaList, MetaNameValue, NestedMeta};
 
 #[cfg(any(feature = "full", feature = "derive"))]
 mod data;
 #[cfg(any(feature = "full", feature = "derive"))]
-pub use data::{Field, Fields, FieldsNamed, FieldsUnnamed, Variant, VisCrate, VisPublic,
-               VisRestricted, Visibility};
+pub use data::{
+    Field, Fields, FieldsNamed, FieldsUnnamed, Variant, VisCrate, VisPublic, VisRestricted,
+    Visibility,
+};
 
 #[cfg(any(feature = "full", feature = "derive"))]
 mod expr;
 #[cfg(any(feature = "full", feature = "derive"))]
-pub use expr::{Expr, ExprReference, ExprArray, ExprAssign, ExprAssignOp, ExprBinary, ExprBlock,
-               ExprBox, ExprBreak, ExprCall, ExprCast, ExprCatch, ExprClosure, ExprContinue,
-               ExprField, ExprForLoop, ExprGroup, ExprIf, ExprIfLet, ExprInPlace, ExprIndex,
-               ExprLit, ExprLoop, ExprMacro, ExprMatch, ExprMethodCall, ExprParen, ExprPath,
-               ExprRange, ExprRepeat, ExprReturn, ExprStruct, ExprTry, ExprTuple, ExprType,
-               ExprUnary, ExprUnsafe, ExprVerbatim, ExprWhile, ExprWhileLet, ExprYield, Index,
-               Member};
+pub use expr::{
+    Expr, ExprArray, ExprAssign, ExprAssignOp, ExprBinary, ExprBlock, ExprBox, ExprBreak, ExprCall,
+    ExprCast, ExprCatch, ExprClosure, ExprContinue, ExprField, ExprForLoop, ExprGroup, ExprIf,
+    ExprIfLet, ExprInPlace, ExprIndex, ExprLit, ExprLoop, ExprMacro, ExprMatch, ExprMethodCall,
+    ExprParen, ExprPath, ExprRange, ExprReference, ExprRepeat, ExprReturn, ExprStruct, ExprTry,
+    ExprTuple, ExprType, ExprUnary, ExprUnsafe, ExprVerbatim, ExprWhile, ExprWhileLet, ExprYield,
+    Index, Member,
+};
 
 #[cfg(feature = "full")]
-pub use expr::{Arm, Block, FieldPat, FieldValue, GenericMethodArgument, Label, Local,
-               MethodTurbofish, Pat, PatBox, PatIdent, PatLit, PatMacro, PatPath, PatRange,
-               PatRef, PatSlice, PatStruct, PatTuple, PatTupleStruct, PatVerbatim, PatWild,
-               RangeLimits, Stmt};
+pub use expr::{
+    Arm, Block, FieldPat, FieldValue, GenericMethodArgument, Label, Local, MethodTurbofish, Pat,
+    PatBox, PatIdent, PatLit, PatMacro, PatPath, PatRange, PatRef, PatSlice, PatStruct, PatTuple,
+    PatTupleStruct, PatVerbatim, PatWild, RangeLimits, Stmt,
+};
 
 #[cfg(any(feature = "full", feature = "derive"))]
 mod generics;
 #[cfg(any(feature = "full", feature = "derive"))]
-pub use generics::{BoundLifetimes, ConstParam, GenericParam, Generics, LifetimeDef, PredicateEq,
-                   PredicateLifetime, PredicateType, TraitBound, TraitBoundModifier, TypeParam,
-                   TypeParamBound, WhereClause, WherePredicate};
+pub use generics::{
+    BoundLifetimes, ConstParam, GenericParam, Generics, LifetimeDef, PredicateEq,
+    PredicateLifetime, PredicateType, TraitBound, TraitBoundModifier, TypeParam, TypeParamBound,
+    WhereClause, WherePredicate,
+};
 #[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
 pub use generics::{ImplGenerics, Turbofish, TypeGenerics};
 
-mod ident;
-pub use ident::Ident;
-
 #[cfg(feature = "full")]
 mod item;
 #[cfg(feature = "full")]
-pub use item::{ArgCaptured, ArgSelf, ArgSelfRef, FnArg, FnDecl, ForeignItem, ForeignItemFn,
-               ForeignItemStatic, ForeignItemType, ForeignItemVerbatim, ImplItem, ImplItemConst,
-               ImplItemMacro, ImplItemMethod, ImplItemType, ImplItemVerbatim, Item, ItemConst,
-               ItemEnum, ItemExternCrate, ItemFn, ItemForeignMod, ItemImpl, ItemMacro, ItemMacro2,
-               ItemMod, ItemStatic, ItemStruct, ItemTrait, ItemType, ItemUnion, ItemUse,
-               ItemVerbatim, MethodSig, TraitItem, TraitItemConst, TraitItemMacro,
-               TraitItemMethod, TraitItemType, TraitItemVerbatim, UseGlob, UseGroup, UseName,
-               UsePath, UseRename, UseTree};
+pub use item::{
+    ArgCaptured, ArgSelf, ArgSelfRef, FnArg, FnDecl, ForeignItem, ForeignItemFn, ForeignItemStatic,
+    ForeignItemType, ForeignItemVerbatim, ImplItem, ImplItemConst, ImplItemMacro, ImplItemMethod,
+    ImplItemType, ImplItemVerbatim, Item, ItemConst, ItemEnum, ItemExternCrate, ItemFn,
+    ItemForeignMod, ItemImpl, ItemMacro, ItemMacro2, ItemMod, ItemStatic, ItemStruct, ItemTrait,
+    ItemType, ItemUnion, ItemUse, ItemVerbatim, MethodSig, TraitItem, TraitItemConst,
+    TraitItemMacro, TraitItemMethod, TraitItemType, TraitItemVerbatim, UseGlob, UseGroup, UseName,
+    UsePath, UseRename, UseTree,
+};
 
 #[cfg(feature = "full")]
 mod file;
 #[cfg(feature = "full")]
 pub use file::File;
 
 #[cfg(any(feature = "full", feature = "derive"))]
 mod lifetime;
 #[cfg(any(feature = "full", feature = "derive"))]
 pub use lifetime::Lifetime;
 
 #[cfg(any(feature = "full", feature = "derive"))]
 mod lit;
 #[cfg(any(feature = "full", feature = "derive"))]
-pub use lit::{FloatSuffix, IntSuffix, Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat,
-              LitInt, LitStr, LitVerbatim, StrStyle};
+pub use lit::{
+    FloatSuffix, IntSuffix, Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat, LitInt, LitStr,
+    LitVerbatim, StrStyle,
+};
 
 #[cfg(any(feature = "full", feature = "derive"))]
 mod mac;
 #[cfg(any(feature = "full", feature = "derive"))]
 pub use mac::{Macro, MacroDelimiter};
 
 #[cfg(any(feature = "full", feature = "derive"))]
 mod derive;
@@ -361,33 +386,37 @@ pub use derive::{Data, DataEnum, DataStr
 #[cfg(any(feature = "full", feature = "derive"))]
 mod op;
 #[cfg(any(feature = "full", feature = "derive"))]
 pub use op::{BinOp, UnOp};
 
 #[cfg(any(feature = "full", feature = "derive"))]
 mod ty;
 #[cfg(any(feature = "full", feature = "derive"))]
-pub use ty::{Abi, BareFnArg, BareFnArgName, ReturnType, Type, TypeArray, TypeBareFn, TypeGroup,
-             TypeImplTrait, TypeInfer, TypeMacro, TypeNever, TypeParen, TypePath, TypePtr,
-             TypeReference, TypeSlice, TypeTraitObject, TypeTuple, TypeVerbatim};
+pub use ty::{
+    Abi, BareFnArg, BareFnArgName, ReturnType, Type, TypeArray, TypeBareFn, TypeGroup,
+    TypeImplTrait, TypeInfer, TypeMacro, TypeNever, TypeParen, TypePath, TypePtr, TypeReference,
+    TypeSlice, TypeTraitObject, TypeTuple, TypeVerbatim,
+};
 
 #[cfg(any(feature = "full", feature = "derive"))]
 mod path;
-#[cfg(any(feature = "full", feature = "derive"))]
-pub use path::{AngleBracketedGenericArguments, Binding, GenericArgument,
-               ParenthesizedGenericArguments, Path, PathArguments, PathSegment, QSelf};
 #[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
 pub use path::PathTokens;
+#[cfg(any(feature = "full", feature = "derive"))]
+pub use path::{
+    AngleBracketedGenericArguments, Binding, GenericArgument, ParenthesizedGenericArguments, Path,
+    PathArguments, PathSegment, QSelf,
+};
 
 #[cfg(feature = "parsing")]
 pub mod buffer;
+pub mod punctuated;
 #[cfg(feature = "parsing")]
 pub mod synom;
-pub mod punctuated;
 #[cfg(any(feature = "full", feature = "derive"))]
 mod tt;
 
 // Not public API except the `parse_quote!` macro.
 #[cfg(feature = "parsing")]
 #[doc(hidden)]
 pub mod parse_quote;
 
@@ -425,17 +454,16 @@ mod gen {
     ///     # fn visit_bin_op(&mut self, node: &'ast BinOp);
     /// }
     /// ```
     ///
     /// *This module is available if Syn is built with the `"visit"` feature.*
     #[cfg(feature = "visit")]
     pub mod visit;
 
-
     /// Syntax tree traversal to mutate an exclusive borrow of a syntax tree in
     /// place.
     ///
     /// Each method of the [`VisitMut`] trait is a hook that can be overridden
     /// to customize the behavior when mutating the corresponding type of node.
     /// By default, every method recursively visits the substructure of the
     /// input by invoking the right visitor method of each of its fields.
     ///
@@ -510,17 +538,17 @@ mod gen {
     #[path = "../gen_helper.rs"]
     mod helper;
 }
 pub use gen::*;
 
 ////////////////////////////////////////////////////////////////////////////////
 
 #[cfg(feature = "parsing")]
-use synom::{Synom, Parser};
+use synom::{Parser, Synom};
 
 #[cfg(feature = "parsing")]
 mod error;
 #[cfg(feature = "parsing")]
 use error::ParseError;
 
 // Not public API.
 #[cfg(feature = "parsing")]
@@ -535,17 +563,18 @@ pub use error::parse_error;
 /// messages.
 ///
 /// This function parses a `proc_macro::TokenStream` which is the type used for
 /// interop with the compiler in a procedural macro. To parse a
 /// `proc_macro2::TokenStream`, use [`syn::parse2`] instead.
 ///
 /// [`syn::parse2`]: fn.parse2.html
 ///
-/// *This function is available if Syn is built with the `"parsing"` feature.*
+/// *This function is available if Syn is built with both the `"parsing"` and
+/// `"proc-macro"` features.*
 ///
 /// # Examples
 ///
 /// ```rust
 /// extern crate proc_macro;
 /// use proc_macro::TokenStream;
 ///
 /// extern crate syn;
@@ -594,21 +623,19 @@ where
 ///
 /// *This function is available if Syn is built with the `"parsing"` feature.*
 #[cfg(feature = "parsing")]
 pub fn parse2<T>(tokens: proc_macro2::TokenStream) -> Result<T, ParseError>
 where
     T: Synom,
 {
     let parser = T::parse;
-    parser.parse2(tokens).map_err(|err| {
-        match T::description() {
-            Some(s) => ParseError::new(format!("failed to parse {}: {}", s, err)),
-            None => err,
-        }
+    parser.parse2(tokens).map_err(|err| match T::description() {
+        Some(s) => ParseError::new(format!("failed to parse {}: {}", s, err)),
+        None => err,
     })
 }
 
 /// Parse a string of Rust code into the chosen syntax tree node.
 ///
 /// *This function is available if Syn is built with the `"parsing"` feature.*
 ///
 /// # Hygiene
@@ -710,15 +737,15 @@ pub fn parse_file(mut content: &str) -> 
 #[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
 struct TokensOrDefault<'a, T: 'a>(&'a Option<T>);
 
 #[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
 impl<'a, T> quote::ToTokens for TokensOrDefault<'a, T>
 where
     T: quote::ToTokens + Default,
 {
-    fn to_tokens(&self, tokens: &mut quote::Tokens) {
+    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
         match *self.0 {
             Some(ref t) => t.to_tokens(tokens),
             None => T::default().to_tokens(tokens),
         }
     }
 }
--- a/third_party/rust/syn/src/lifetime.rs
+++ b/third_party/rust/syn/src/lifetime.rs
@@ -5,36 +5,39 @@
 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
 use std::cmp::Ordering;
 use std::fmt::{self, Display};
 use std::hash::{Hash, Hasher};
 
-use proc_macro2::{Span, Term};
+use proc_macro2::{Ident, Span};
 use unicode_xid::UnicodeXID;
 
+use token::Apostrophe;
+
 /// A Rust lifetime: `'a`.
 ///
 /// Lifetime names must conform to the following rules:
 ///
 /// - Must start with an apostrophe.
 /// - Must not consist of just an apostrophe: `'`.
 /// - Character after the apostrophe must be `_` or a Unicode code point with
 ///   the XID_Start property.
 /// - All following characters must be Unicode code points with the XID_Continue
 ///   property.
 ///
 /// *This type is available if Syn is built with the `"derive"` or `"full"`
 /// feature.*
 #[cfg_attr(feature = "extra-traits", derive(Debug))]
-#[derive(Copy, Clone)]
+#[derive(Clone)]
 pub struct Lifetime {
-    term: Term,
+    pub apostrophe: Apostrophe,
+    pub ident: Ident,
 }
 
 impl Lifetime {
     pub fn new(s: &str, span: Span) -> Self {
         if !s.starts_with('\'') {
             panic!(
                 "lifetime name must start with apostrophe as in \"'a\", \
                  got {:?}",
@@ -60,96 +63,89 @@ impl Lifetime {
             true
         }
 
         if !xid_ok(&s[1..]) {
             panic!("{:?} is not a valid lifetime name", s);
         }
 
         Lifetime {
-            term: Term::new(s, span),
+            apostrophe: Default::default(),
+            ident: Ident::new(&s[1..], span),
         }
     }
-
-    pub fn span(&self) -> Span {
-        self.term.span()
-    }
-
-    pub fn set_span(&mut self, span: Span) {
-        self.term.set_span(span);
-    }
 }
 
 impl Display for Lifetime {
     fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
-        self.term.as_str().fmt(formatter)
+        "'".fmt(formatter)?;
+        self.ident.fmt(formatter)
     }
 }
 
 impl PartialEq for Lifetime {
     fn eq(&self, other: &Lifetime) -> bool {
-        self.term.as_str() == other.term.as_str()
+        self.ident.eq(&other.ident)
     }
 }
 
 impl Eq for Lifetime {}
 
 impl PartialOrd for Lifetime {
     fn partial_cmp(&self, other: &Lifetime) -> Option<Ordering> {
         Some(self.cmp(other))
     }
 }
 
 impl Ord for Lifetime {
     fn cmp(&self, other: &Lifetime) -> Ordering {
-        self.term.as_str().cmp(other.term.as_str())
+        self.ident.cmp(&other.ident)
     }
 }
 
 impl Hash for Lifetime {
     fn hash<H: Hasher>(&self, h: &mut H) {
-        self.term.as_str().hash(h)
+        self.ident.hash(h)
     }
 }
 
 #[cfg(feature = "parsing")]
 pub mod parsing {
     use super::*;
-    use synom::Synom;
     use buffer::Cursor;
     use parse_error;
     use synom::PResult;
+    use synom::Synom;
 
     impl Synom for Lifetime {
         fn parse(input: Cursor) -> PResult<Self> {
-            let (term, rest) = match input.term() {
-                Some(term) => term,
-                _ => return parse_error(),
+            let (apostrophe, rest) = Apostrophe::parse(input)?;
+            let (ident, rest) = match rest.ident() {
+                Some(pair) => pair,
+                None => return parse_error(),
             };
-            if !term.as_str().starts_with('\'') {
-                return parse_error();
-            }
 
-            Ok((
-                Lifetime {
-                    term: term,
-                },
-                rest,
-            ))
+            let ret = Lifetime {
+                ident: ident,
+                apostrophe: apostrophe,
+            };
+            Ok((ret, rest))
         }
 
         fn description() -> Option<&'static str> {
             Some("lifetime")
         }
     }
 }
 
 #[cfg(feature = "printing")]
 mod printing {
     use super::*;
-    use quote::{ToTokens, Tokens};
+    use proc_macro2::TokenStream;
+    use quote::ToTokens;
 
     impl ToTokens for Lifetime {
-        fn to_tokens(&self, tokens: &mut Tokens) {
-            self.term.to_tokens(tokens);
+        fn to_tokens(&self, tokens: &mut TokenStream) {
+            self.apostrophe.to_tokens(tokens);
+            self.ident.to_tokens(tokens);
         }
     }
 }
--- a/third_party/rust/syn/src/lit.rs
+++ b/third_party/rust/syn/src/lit.rs
@@ -5,24 +5,23 @@
 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
 use proc_macro2::{Literal, Span};
 use std::str;
 
 #[cfg(feature = "printing")]
-use proc_macro2::Term;
+use proc_macro2::Ident;
 
 #[cfg(feature = "parsing")]
 use proc_macro2::TokenStream;
 #[cfg(feature = "parsing")]
 use {ParseError, Synom};
 
-#[cfg(any(feature = "printing", feature = "parsing", feature = "derive"))]
 use proc_macro2::TokenTree;
 
 #[cfg(feature = "extra-traits")]
 use std::hash::{Hash, Hasher};
 
 ast_enum_of_structs! {
     /// A Rust literal such as a string or integer or boolean.
     ///
@@ -107,19 +106,17 @@ ast_enum_of_structs! {
         }),
     }
 }
 
 impl LitStr {
     pub fn new(value: &str, span: Span) -> Self {
         let mut lit = Literal::string(value);
         lit.set_span(span);
-        LitStr {
-            token: lit,
-        }
+        LitStr { token: lit }
     }
 
     pub fn value(&self) -> String {
         value::parse_lit_str(&self.token.to_string())
     }
 
     /// Parse a syntax tree node from the content of this string literal.
     ///
@@ -132,17 +129,20 @@ impl LitStr {
         // original literal's span.
         fn spanned_tokens(s: &LitStr) -> Result<TokenStream, ParseError> {
             let stream = ::parse_str(&s.value())?;
             Ok(respan_token_stream(stream, s.span()))
         }
 
         // Token stream with every span replaced by the given one.
         fn respan_token_stream(stream: TokenStream, span: Span) -> TokenStream {
-            stream.into_iter().map(|token| respan_token_tree(token, span)).collect()
+            stream
+                .into_iter()
+                .map(|token| respan_token_tree(token, span))
+                .collect()
         }
 
         // Token tree with every span replaced by the given one.
         fn respan_token_tree(mut token: TokenTree, span: Span) -> TokenTree {
             match token {
                 TokenTree::Group(ref mut g) => {
                     let stream = respan_token_stream(g.stream().clone(), span);
                     *g = Group::new(g.delimiter(), stream);
@@ -332,17 +332,17 @@ macro_rules! lit_extra_traits {
         impl Hash for $ty {
             fn hash<H>(&self, state: &mut H)
             where
                 H: Hasher,
             {
                 self.$field.to_string().hash(state);
             }
         }
-    }
+    };
 }
 
 impl LitVerbatim {
     pub fn span(&self) -> Span {
         self.token.span()
     }
 
     pub fn set_span(&mut self, span: Span) {
@@ -408,43 +408,43 @@ ast_enum! {
         F64,
         None,
     }
 }
 
 #[cfg(feature = "parsing")]
 pub mod parsing {
     use super::*;
-    use synom::Synom;
     use buffer::Cursor;
     use parse_error;
     use synom::PResult;
+    use synom::Synom;
 
     impl Synom for Lit {
         fn parse(input: Cursor) -> PResult<Self> {
             match input.literal() {
                 Some((lit, rest)) => {
                     if lit.to_string().starts_with('/') {
                         // Doc comment literal which is not a Syn literal
                         parse_error()
                     } else {
                         Ok((Lit::new(lit), rest))
                     }
                 }
-                _ => match input.term() {
-                    Some((term, rest)) => Ok((
+                _ => match input.ident() {
+                    Some((ident, rest)) => Ok((
                         Lit::Bool(LitBool {
-                            value: if term.as_str() == "true" {
+                            value: if ident == "true" {
                                 true
-                            } else if term.as_str() == "false" {
+                            } else if ident == "false" {
                                 false
                             } else {
                                 return parse_error();
                             },
-                            span: term.span(),
+                            span: ident.span(),
                         }),
                         rest,
                     )),
                     _ => parse_error(),
                 },
             }
         }
 
@@ -501,73 +501,74 @@ pub mod parsing {
         |
         _ => reject!()
     ));
 }
 
 #[cfg(feature = "printing")]
 mod printing {
     use super::*;
-    use quote::{ToTokens, Tokens};
+    use proc_macro2::TokenStream;
+    use quote::{ToTokens, TokenStreamExt};
 
     impl ToTokens for LitStr {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for LitByteStr {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for LitByte {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for LitChar {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for LitInt {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for LitFloat {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for LitBool {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             let s = if self.value { "true" } else { "false" };
-            tokens.append(Term::new(s, self.span));
+            tokens.append(Ident::new(s, self.span));
         }
     }
 
     impl ToTokens for LitVerbatim {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.token.to_tokens(tokens);
         }
     }
 }
 
 mod value {
     use super::*;
+    use proc_macro2::TokenStream;
     use std::char;
     use std::ops::{Index, RangeFrom};
-    use proc_macro2::TokenStream;
 
     impl Lit {
         /// Interpret a Syn literal from a proc-macro2 literal.
         ///
         /// Not all proc-macro2 literals are valid Syn literals. In particular,
         /// doc comments are considered by proc-macro2 to be literals but in Syn
         /// they are [`Attribute`].
         ///
@@ -575,52 +576,30 @@ mod value {
         ///
         /// # Panics
         ///
         /// Panics if the input is a doc comment literal.
         pub fn new(token: Literal) -> Self {
             let value = token.to_string();
 
             match value::byte(&value, 0) {
-                b'"' | b'r' => {
-                    return Lit::Str(LitStr {
-                        token: token,
-                    })
-                }
+                b'"' | b'r' => return Lit::Str(LitStr { token: token }),
                 b'b' => match value::byte(&value, 1) {
-                    b'"' | b'r' => {
-                        return Lit::ByteStr(LitByteStr {
-                            token: token,
-                        })
-                    }
-                    b'\'' => {
-                        return Lit::Byte(LitByte {
-                            token: token,
-                        })
-                    }
+                    b'"' | b'r' => return Lit::ByteStr(LitByteStr { token: token }),
+                    b'\'' => return Lit::Byte(LitByte { token: token }),
                     _ => {}
                 },
-                b'\'' => {
-                    return Lit::Char(LitChar {
-                        token: token,
-                    })
-                }
+                b'\'' => return Lit::Char(LitChar { token: token }),
                 b'0'...b'9' => if number_is_int(&value) {
-                    return Lit::Int(LitInt {
-                        token: token,
-                    });
+                    return Lit::Int(LitInt { token: token });
                 } else if number_is_float(&value) {
-                    return Lit::Float(LitFloat {
-                        token: token,
-                    });
+                    return Lit::Float(LitFloat { token: token });
                 } else {
                     // number overflow
-                    return Lit::Verbatim(LitVerbatim {
-                        token: token,
-                    });
+                    return Lit::Verbatim(LitVerbatim { token: token });
                 },
                 _ => if value == "true" || value == "false" {
                     return Lit::Bool(LitBool {
                         value: value == "true",
                         span: token.span(),
                     });
                 },
             }
--- a/third_party/rust/syn/src/mac.rs
+++ b/third_party/rust/syn/src/mac.rs
@@ -41,17 +41,18 @@ ast_enum! {
 }
 
 #[cfg(feature = "extra-traits")]
 impl Eq for Macro {}
 
 #[cfg(feature = "extra-traits")]
 impl PartialEq for Macro {
     fn eq(&self, other: &Self) -> bool {
-        self.path == other.path && self.bang_token == other.bang_token
+        self.path == other.path
+            && self.bang_token == other.bang_token
             && self.delimiter == other.delimiter
             && TokenStreamHelper(&self.tts) == TokenStreamHelper(&other.tts)
     }
 }
 
 #[cfg(feature = "extra-traits")]
 impl Hash for Macro {
     fn hash<H>(&self, state: &mut H)
@@ -88,20 +89,21 @@ pub mod parsing {
             Some("macro invocation")
         }
     }
 }
 
 #[cfg(feature = "printing")]
 mod printing {
     use super::*;
-    use quote::{ToTokens, Tokens};
+    use proc_macro2::TokenStream;
+    use quote::ToTokens;
 
     impl ToTokens for Macro {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.path.to_tokens(tokens);
             self.bang_token.to_tokens(tokens);
             match self.delimiter {
                 MacroDelimiter::Paren(ref paren) => {
                     paren.surround(tokens, |tokens| self.tts.to_tokens(tokens));
                 }
                 MacroDelimiter::Brace(ref brace) => {
                     brace.surround(tokens, |tokens| self.tts.to_tokens(tokens));
--- a/third_party/rust/syn/src/macros.rs
+++ b/third_party/rust/syn/src/macros.rs
@@ -128,17 +128,17 @@ macro_rules! generate_to_tokens {
         generate_to_tokens!(
             ($($arms)* $name::$variant(ref _e) => to_tokens_call!(_e, $tokens, $($rest)*),)
             $tokens $name { $($next)* }
         );
     };
 
     (($($arms:tt)*) $tokens:ident $name:ident {}) => {
         impl ::quote::ToTokens for $name {
-            fn to_tokens(&self, $tokens: &mut ::quote::Tokens) {
+            fn to_tokens(&self, $tokens: &mut ::proc_macro2::TokenStream) {
                 match *self {
                     $($arms)*
                 }
             }
         }
     };
 }
 
--- a/third_party/rust/syn/src/op.rs
+++ b/third_party/rust/syn/src/op.rs
@@ -169,20 +169,21 @@ pub mod parsing {
             Some("unary operator: `*`, `!`, or `-`")
         }
     }
 }
 
 #[cfg(feature = "printing")]
 mod printing {
     use super::*;
-    use quote::{ToTokens, Tokens};
+    use proc_macro2::TokenStream;
+    use quote::ToTokens;
 
     impl ToTokens for BinOp {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             match *self {
                 BinOp::Add(ref t) => t.to_tokens(tokens),
                 BinOp::Sub(ref t) => t.to_tokens(tokens),
                 BinOp::Mul(ref t) => t.to_tokens(tokens),
                 BinOp::Div(ref t) => t.to_tokens(tokens),
                 BinOp::Rem(ref t) => t.to_tokens(tokens),
                 BinOp::And(ref t) => t.to_tokens(tokens),
                 BinOp::Or(ref t) => t.to_tokens(tokens),
@@ -207,17 +208,17 @@ mod printing {
                 BinOp::BitOrEq(ref t) => t.to_tokens(tokens),
                 BinOp::ShlEq(ref t) => t.to_tokens(tokens),
                 BinOp::ShrEq(ref t) => t.to_tokens(tokens),
             }
         }
     }
 
     impl ToTokens for UnOp {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             match *self {
                 UnOp::Deref(ref t) => t.to_tokens(tokens),
                 UnOp::Not(ref t) => t.to_tokens(tokens),
                 UnOp::Neg(ref t) => t.to_tokens(tokens),
             }
         }
     }
 }
--- a/third_party/rust/syn/src/parse_quote.rs
+++ b/third_party/rust/syn/src/parse_quote.rs
@@ -83,45 +83,48 @@ macro_rules! parse_quote {
     ($($tt:tt)*) => {
         $crate::parse_quote::parse($crate::parse_quote::From::from(quote!($($tt)*)))
     };
 }
 
 ////////////////////////////////////////////////////////////////////////////////
 // Can parse any type that implements Synom.
 
-use synom::{Synom, Parser, PResult};
 use buffer::Cursor;
 use proc_macro2::TokenStream;
+use synom::{PResult, Parser, Synom};
 
 // Not public API.
 #[doc(hidden)]
 pub use std::convert::From;
 
 // Not public API.
 #[doc(hidden)]
 pub fn parse<T: ParseQuote>(token_stream: TokenStream) -> T {
     let parser = T::parse;
     match parser.parse2(token_stream) {
         Ok(t) => t,
         Err(err) => match T::description() {
             Some(s) => panic!("failed to parse {}: {}", s, err),
             None => panic!("{}", err),
-        }
+        },
     }
 }
 
 // Not public API.
 #[doc(hidden)]
 pub trait ParseQuote: Sized {
     fn parse(input: Cursor) -> PResult<Self>;
     fn description() -> Option<&'static str>;
 }
 
-impl<T> ParseQuote for T where T: Synom {
+impl<T> ParseQuote for T
+where
+    T: Synom,
+{
     fn parse(input: Cursor) -> PResult<Self> {
         <T as Synom>::parse(input)
     }
 
     fn description() -> Option<&'static str> {
         <T as Synom>::description()
     }
 }
--- a/third_party/rust/syn/src/parsers.rs
+++ b/third_party/rust/syn/src/parsers.rs
@@ -90,16 +90,18 @@ macro_rules! named {
         }
     };
 }
 
 #[cfg(synom_verbose_trace)]
 #[macro_export]
 macro_rules! call {
     ($i:expr, $fun:expr $(, $args:expr)*) => {{
+        #[allow(unused_imports)]
+        use $crate::synom::ext::*;
         let i = $i;
         eprintln!(concat!(" -> ", stringify!($fun), " @ {:?}"), i);
         let r = $fun(i $(, $args)*);
         match r {
             Ok((_, i)) => eprintln!(concat!("OK  ", stringify!($fun), " @ {:?}"), i),
             Err(_) => eprintln!(concat!("ERR ", stringify!($fun), " @ {:?}"), i),
         }
         r
@@ -138,19 +140,21 @@ macro_rules! call {
 /// #
 /// # fn main() {}
 /// ```
 ///
 /// *This macro is available if Syn is built with the `"parsing"` feature.*
 #[cfg(not(synom_verbose_trace))]
 #[macro_export]
 macro_rules! call {
-    ($i:expr, $fun:expr $(, $args:expr)*) => {
+    ($i:expr, $fun:expr $(, $args:expr)*) => {{
+        #[allow(unused_imports)]
+        use $crate::synom::ext::*;
         $fun($i $(, $args)*)
-    };
+    }};
 }
 
 /// Transform the result of a parser by applying a function or closure.
 ///
 /// - **Syntax:** `map!(THING, FN)`
 /// - **Output:** the return type of function FN applied to THING
 ///
 /// ```rust
@@ -212,17 +216,17 @@ pub fn invoke<T, R, F: FnOnce(T) -> R>(f
 ///
 /// - **Syntax:** `not!(THING)`
 /// - **Output:** `()`
 ///
 /// ```rust
 /// #[macro_use]
 /// extern crate syn;
 ///
-/// use syn::{Expr, Ident};
+/// use syn::Expr;
 ///
 /// /// Parses any expression that does not begin with a `-` minus sign.
 /// named!(not_negative_expr -> Expr, do_parse!(
 ///     not!(punct!(-)) >>
 ///     e: syn!(Expr) >>
 ///     (e)
 /// ));
 /// #
@@ -718,17 +722,17 @@ macro_rules! value {
 /// ```
 ///
 /// *This macro is available if Syn is built with the `"parsing"` feature.*
 #[macro_export]
 macro_rules! reject {
     ($i:expr,) => {{
         let _ = $i;
         $crate::parse_error()
-    }}
+    }};
 }
 
 /// Run a series of parsers and produce all of the results in a tuple.
 ///
 /// - **Syntax:** `tuple!(A, B, C, ...)`
 /// - **Output:** `(A, B, C, ...)`
 ///
 /// ```rust
@@ -806,25 +810,26 @@ macro_rules! tuple_parser {
 /// - **Syntax:** `alt!(THING1 | THING2 => { FUNC } | ...)`
 /// - **Output:** `T`, the return type of `THING1` and `FUNC(THING2)` and ...
 ///
 /// # Example
 ///
 /// ```rust
 /// #[macro_use]
 /// extern crate syn;
+/// extern crate proc_macro2;
 ///
-/// use syn::Ident;
+/// use proc_macro2::{Ident, Span};
 ///
 /// // Parse any identifier token, or the `!` token in which case the
 /// // identifier is treated as `"BANG"`.
 /// named!(ident_or_bang -> Ident, alt!(
 ///     syn!(Ident)
 ///     |
-///     punct!(!) => { |_| "BANG".into() }
+///     punct!(!) => { |_| Ident::new("BANG", Span::call_site()) }
 /// ));
 /// #
 /// # fn main() {}
 /// ```
 ///
 /// The `alt!` macro is most commonly seen when parsing a syntax tree enum such
 /// as the [`Item`] enum.
 ///
@@ -921,20 +926,20 @@ macro_rules! alt {
 /// - **Syntax:** `do_parse!(name: THING1 >> THING2 >> (RESULT))`
 /// - **Output:** `RESULT`
 ///
 /// ```rust
 /// #[macro_use]
 /// extern crate syn;
 /// extern crate proc_macro2;
 ///
-/// use syn::Ident;
+/// use proc_macro2::Ident;
+/// use proc_macro2::TokenStream;
 /// use syn::token::Paren;
 /// use syn::synom::Synom;
-/// use proc_macro2::TokenStream;
 ///
 /// /// Parse a macro invocation that uses `(` `)` parentheses.
 /// ///
 /// /// Example: `stringify!($args)`.
 /// struct Macro {
 ///     name: Ident,
 ///     bang_token: Token![!],
 ///     paren_token: Paren,
@@ -1257,16 +1262,68 @@ macro_rules! tap {
 /// *This macro is available if Syn is built with the `"parsing"` feature.*
 #[macro_export]
 macro_rules! syn {
     ($i:expr, $t:ty) => {
         <$t as $crate::synom::Synom>::parse($i)
     };
 }
 
+/// Parse the given word as a keyword.
+///
+/// For words that are keywords in the Rust language, it is better to use the
+/// [`keyword!`] parser which returns a unique type for each keyword.
+///
+/// [`keyword!`]: macro.keyword.html
+///
+/// - **Syntax:** `custom_keyword!(KEYWORD)`
+/// - **Output:** `Ident`
+///
+/// ```rust
+/// #[macro_use]
+/// extern crate syn;
+///
+/// use syn::Ident;
+/// use syn::synom::Synom;
+///
+/// struct Flag {
+///     name: Ident,
+/// }
+///
+/// // Parses the custom keyword `flag` followed by any name for a flag.
+/// //
+/// // Example: `flag Verbose`
+/// impl Synom for Flag {
+///     named!(parse -> Flag, do_parse!(
+///         custom_keyword!(flag) >>
+///         name: syn!(Ident) >>
+///         (Flag { name })
+///     ));
+/// }
+/// #
+/// # fn main() {}
+/// ```
+///
+/// *This macro is available if Syn is built with the `"parsing"` feature.*
+#[macro_export]
+macro_rules! custom_keyword {
+    ($i:expr, $keyword:ident) => {
+        match <$crate::Ident as $crate::synom::Synom>::parse($i) {
+            ::std::result::Result::Err(err) => ::std::result::Result::Err(err),
+            ::std::result::Result::Ok((token, i)) => {
+                if token == stringify!($keyword) {
+                    ::std::result::Result::Ok((token, i))
+                } else {
+                    $crate::parse_error()
+                }
+            }
+        }
+    };
+}
+
 /// Parse inside of `(` `)` parentheses.
 ///
 /// This macro parses a set of balanced parentheses and invokes a sub-parser on
 /// the content inside. The sub-parser is required to consume all tokens within
 /// the parentheses in order for this parser to return successfully.
 ///
 /// - **Syntax:** `parens!(CONTENT)`
 /// - **Output:** `(token::Paren, CONTENT)`
--- a/third_party/rust/syn/src/path.rs
+++ b/third_party/rust/syn/src/path.rs
@@ -1,18 +1,18 @@
 // Copyright 2018 Syn Developers
 //
 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+use super::*;
 use punctuated::Punctuated;
-use super::*;
 
 ast_struct! {
     /// A path at which a named item is exported: `std::collections::HashMap`.
     ///
     /// *This type is available if Syn is built with the `"derive"` or `"full"`
     /// feature.*
     pub struct Path {
         pub leading_colon: Option<Token![::]>,
@@ -26,27 +26,29 @@ impl Path {
     }
 }
 
 /// A helper for printing a self-type qualified path as tokens.
 ///
 /// ```rust
 /// extern crate syn;
 /// extern crate quote;
+/// extern crate proc_macro2;
 ///
 /// use syn::{QSelf, Path, PathTokens};
-/// use quote::{Tokens, ToTokens};
+/// use proc_macro2::TokenStream;
+/// use quote::ToTokens;
 ///
 /// struct MyNode {
 ///     qself: Option<QSelf>,
 ///     path: Path,
 /// }
 ///
 /// impl ToTokens for MyNode {
-///     fn to_tokens(&self, tokens: &mut Tokens) {
+///     fn to_tokens(&self, tokens: &mut TokenStream) {
 ///         PathTokens(&self.qself, &self.path).to_tokens(tokens);
 ///     }
 /// }
 /// #
 /// # fn main() {}
 /// ```
 ///
 /// *This type is available if Syn is built with the `"derive"` or `"full"`
@@ -290,17 +292,17 @@ pub mod parsing {
         fn description() -> Option<&'static str> {
             Some("angle bracketed generic arguments")
         }
     }
 
     impl Synom for ParenthesizedGenericArguments {
         named!(parse -> Self, do_parse!(
             data: parens!(Punctuated::parse_terminated) >>
-            output: syn!(ReturnType) >>
+            output: call!(ReturnType::without_plus) >>
             (ParenthesizedGenericArguments {
                 paren_token: data.0,
                 inputs: data.1,
                 output: output,
             })
         ));
 
         fn description() -> Option<&'static str> {
@@ -411,49 +413,50 @@ pub mod parsing {
         not!(punct!(=)) >>
         (ty)
     ));
 }
 
 #[cfg(feature = "printing")]
 mod printing {
     use super::*;
-    use quote::{ToTokens, Tokens};
+    use proc_macro2::TokenStream;
+    use quote::ToTokens;
 
     impl ToTokens for Path {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.leading_colon.to_tokens(tokens);
             self.segments.to_tokens(tokens);
         }
     }
 
     impl ToTokens for PathSegment {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.ident.to_tokens(tokens);
             self.arguments.to_tokens(tokens);
         }
     }
 
     impl ToTokens for PathArguments {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             match *self {
                 PathArguments::None => {}
                 PathArguments::AngleBracketed(ref arguments) => {
                     arguments.to_tokens(tokens);
                 }
                 PathArguments::Parenthesized(ref arguments) => {
                     arguments.to_tokens(tokens);
                 }
             }
         }
     }
 
     impl ToTokens for GenericArgument {
         #[cfg_attr(feature = "cargo-clippy", allow(match_same_arms))]
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             match *self {
                 GenericArgument::Lifetime(ref lt) => lt.to_tokens(tokens),
                 GenericArgument::Type(ref ty) => ty.to_tokens(tokens),
                 GenericArgument::Binding(ref tb) => tb.to_tokens(tokens),
                 GenericArgument::Const(ref e) => match *e {
                     Expr::Lit(_) => e.to_tokens(tokens),
 
                     // NOTE: We should probably support parsing blocks with only
@@ -468,17 +471,17 @@ mod printing {
                         e.to_tokens(tokens);
                     }),
                 },
             }
         }
     }
 
     impl ToTokens for AngleBracketedGenericArguments {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.colon2_token.to_tokens(tokens);
             self.lt_token.to_tokens(tokens);
 
             // Print lifetimes before types and consts, all before bindings,
             // regardless of their order in self.args.
             //
             // TODO: ordering rules for const arguments vs type arguments have
             // not been settled yet. https://github.com/rust-lang/rust/issues/44580
@@ -511,34 +514,34 @@ mod printing {
                 }
             }
 
             self.gt_token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for Binding {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.ident.to_tokens(tokens);
             self.eq_token.to_tokens(tokens);
             self.ty.to_tokens(tokens);
         }
     }
 
     impl ToTokens for ParenthesizedGenericArguments {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.paren_token.surround(tokens, |tokens| {
                 self.inputs.to_tokens(tokens);
             });
             self.output.to_tokens(tokens);
         }
     }
 
     impl<'a> ToTokens for PathTokens<'a> {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             let qself = match *self.0 {
                 Some(ref qself) => qself,
                 None => return self.1.to_tokens(tokens),
             };
             qself.lt_token.to_tokens(tokens);
             qself.ty.to_tokens(tokens);
 
             // XXX: Gross.
--- a/third_party/rust/syn/src/punctuated.rs
+++ b/third_party/rust/syn/src/punctuated.rs
@@ -22,130 +22,141 @@
 //!
 //! [`Punctuated<T, P>`]: struct.Punctuated.html
 //!
 //! ```text
 //! a_function_call(arg1, arg2, arg3);
 //!                 ^^^^^ ~~~~~ ^^^^
 //! ```
 
+#[cfg(feature = "extra-traits")]
+use std::fmt::{self, Debug};
 #[cfg(any(feature = "full", feature = "derive"))]
 use std::iter;
 use std::iter::FromIterator;
 use std::ops::{Index, IndexMut};
+use std::option;
 use std::slice;
 use std::vec;
-#[cfg(feature = "extra-traits")]
-use std::fmt::{self, Debug};
 
 #[cfg(feature = "parsing")]
-use synom::{Synom, PResult};
-#[cfg(feature = "parsing")]
 use buffer::Cursor;
 #[cfg(feature = "parsing")]
 use parse_error;
+#[cfg(feature = "parsing")]
+use synom::{PResult, Synom};
 
 /// A punctuated sequence of syntax tree nodes of type `T` separated by
 /// punctuation of type `P`.
 ///
 /// Refer to the [module documentation] for details about punctuated sequences.
 ///
 /// [module documentation]: index.html
 #[cfg_attr(feature = "extra-traits", derive(Eq, PartialEq, Hash))]
 #[cfg_attr(feature = "clone-impls", derive(Clone))]
 pub struct Punctuated<T, P> {
-    inner: Vec<(T, Option<P>)>,
+    inner: Vec<(T, P)>,
+    last: Option<Box<T>>,
 }
 
 impl<T, P> Punctuated<T, P> {
     /// Creates an empty punctuated sequence.
     pub fn new() -> Punctuated<T, P> {
-        Punctuated { inner: Vec::new() }
+        Punctuated {
+            inner: Vec::new(),
+            last: None,
+        }
     }
 
     /// Determines whether this punctuated sequence is empty, meaning it
     /// contains no syntax tree nodes or punctuation.
     pub fn is_empty(&self) -> bool {
-        self.inner.len() == 0
+        self.inner.len() == 0 && self.last.is_none()
     }
 
     /// Returns the number of syntax tree nodes in this punctuated sequence.
     ///
     /// This is the number of nodes of type `T`, not counting the punctuation of
     /// type `P`.
     pub fn len(&self) -> usize {
-        self.inner.len()
+        self.inner.len() + if self.last.is_some() { 1 } else { 0 }
     }
 
     /// Borrows the first punctuated pair in this sequence.
     pub fn first(&self) -> Option<Pair<&T, &P>> {
-        self.inner.first().map(|&(ref t, ref d)| match *d {
-            Some(ref d) => Pair::Punctuated(t, d),
-            None => Pair::End(t),
-        })
+        self.pairs().next()
     }
 
     /// Borrows the last punctuated pair in this sequence.
     pub fn last(&self) -> Option<Pair<&T, &P>> {
-        self.inner.last().map(|&(ref t, ref d)| match *d {
-            Some(ref d) => Pair::Punctuated(t, d),
-            None => Pair::End(t),
-        })
+        if self.last.is_some() {
+            self.last.as_ref().map(|t| Pair::End(t.as_ref()))
+        } else {
+            self.inner
+                .last()
+                .map(|&(ref t, ref d)| Pair::Punctuated(t, d))
+        }
     }
 
     /// Mutably borrows the last punctuated pair in this sequence.
     pub fn last_mut(&mut self) -> Option<Pair<&mut T, &mut P>> {
-        self.inner
-            .last_mut()
-            .map(|&mut (ref mut t, ref mut d)| match *d {
-                Some(ref mut d) => Pair::Punctuated(t, d),
-                None => Pair::End(t),
-            })
+        if self.last.is_some() {
+            self.last.as_mut().map(|t| Pair::End(t.as_mut()))
+        } else {
+            self.inner
+                .last_mut()
+                .map(|&mut (ref mut t, ref mut d)| Pair::Punctuated(t, d))
+        }
     }
 
     /// Returns an iterator over borrowed syntax tree nodes of type `&T`.
     pub fn iter(&self) -> Iter<T> {
         Iter {
             inner: Box::new(PrivateIter {
                 inner: self.inner.iter(),
+                last: self.last.as_ref().map(|t| t.as_ref()).into_iter(),
             }),
         }
     }
 
     /// Returns an iterator over mutably borrowed syntax tree nodes of type
     /// `&mut T`.
     pub fn iter_mut(&mut self) -> IterMut<T> {
         IterMut {
             inner: Box::new(PrivateIterMut {
                 inner: self.inner.iter_mut(),
+                last: self.last.as_mut().map(|t| t.as_mut()).into_iter(),
             }),
         }
     }
 
     /// Returns an iterator over the contents of this sequence as borrowed
     /// punctuated pairs.
     pub fn pairs(&self) -> Pairs<T, P> {
         Pairs {
             inner: self.inner.iter(),
+            last: self.last.as_ref().map(|t| t.as_ref()).into_iter(),
         }
     }
 
     /// Returns an iterator over the contents of this sequence as mutably
     /// borrowed punctuated pairs.
     pub fn pairs_mut(&mut self) -> PairsMut<T, P> {
         PairsMut {
             inner: self.inner.iter_mut(),
+            last: self.last.as_mut().map(|t| t.as_mut()).into_iter(),
         }
     }
 
     /// Returns an iterator over the contents of this sequence as owned
     /// punctuated pairs.
     pub fn into_pairs(self) -> IntoPairs<T, P> {
         IntoPairs {
             inner: self.inner.into_iter(),
+            last: self.last.map(|t| *t).into_iter(),
         }
     }
 
     /// Appends a syntax tree node onto the end of this punctuated sequence. The
     /// sequence must previously have a trailing punctuation.
     ///
     /// Use [`push`] instead if the punctuated sequence may or may not already
     /// have trailing punctuation.
@@ -153,57 +164,54 @@ impl<T, P> Punctuated<T, P> {
     /// [`push`]: #method.push
     ///
     /// # Panics
     ///
     /// Panics if the sequence does not already have a trailing punctuation when
     /// this method is called.
     pub fn push_value(&mut self, value: T) {
         assert!(self.empty_or_trailing());
-        self.inner.push((value, None));
+        self.last = Some(Box::new(value));
     }
 
     /// Appends a trailing punctuation onto the end of this punctuated sequence.
     /// The sequence must be non-empty and must not already have trailing
     /// punctuation.
     ///
     /// # Panics
     ///
     /// Panics if the sequence is empty or already has a trailing punctuation.
     pub fn push_punct(&mut self, punctuation: P) {
-        assert!(!self.is_empty());
-        let last = self.inner.last_mut().unwrap();
-        assert!(last.1.is_none());
-        last.1 = Some(punctuation);
+        assert!(self.last.is_some());
+        let last = self.last.take().unwrap();
+        self.inner.push((*last, punctuation));
     }
 
     /// Removes the last punctuated pair from this sequence, or `None` if the
     /// sequence is empty.
     pub fn pop(&mut self) -> Option<Pair<T, P>> {
-        self.inner.pop().map(|(t, d)| Pair::new(t, d))
+        if self.last.is_some() {
+            self.last.take().map(|t| Pair::End(*t))
+        } else {
+            self.inner.pop().map(|(t, d)| Pair::Punctuated(t, d))
+        }
     }
 
     /// Determines whether this punctuated sequence ends with a trailing
     /// punctuation.
     pub fn trailing_punct(&self) -> bool {
-        self.inner
-            .last()
-            .map(|last| last.1.is_some())
-            .unwrap_or(false)
+        self.last.is_none() && !self.is_empty()
     }
 
     /// Returns true if either this `Punctuated` is empty, or it has a trailing
     /// punctuation.
     ///
     /// Equivalent to `punctuated.is_empty() || punctuated.trailing_punct()`.
     pub fn empty_or_trailing(&self) -> bool {
-        self.inner
-            .last()
-            .map(|last| last.1.is_some())
-            .unwrap_or(true)
+        self.last.is_none()
     }
 }
 
 impl<T, P> Punctuated<T, P>
 where
     P: Default,
 {
     /// Appends a syntax tree node onto the end of this punctuated sequence.
@@ -225,25 +233,30 @@ where
     /// Panics if `index` is greater than the number of elements previously in
     /// this punctuated sequence.
     pub fn insert(&mut self, index: usize, value: T) {
         assert!(index <= self.len());
 
         if index == self.len() {
             self.push(value);
         } else {
-            self.inner.insert(index, (value, Some(Default::default())));
+            self.inner.insert(index, (value, Default::default()));
         }
     }
 }
 
 #[cfg(feature = "extra-traits")]
 impl<T: Debug, P: Debug> Debug for Punctuated<T, P> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.inner.fmt(f)
+        let mut list = f.debug_list();
+        list.entries(&self.inner);
+        for t in self.last.iter() {
+            list.entry(&*t);
+        }
+        list.finish()
     }
 }
 
 impl<T, P> FromIterator<T> for Punctuated<T, P>
 where
     P: Default,
 {
     fn from_iter<I: IntoIterator<Item = T>>(i: I) -> Self {
@@ -269,32 +282,41 @@ impl<T, P> FromIterator<Pair<T, P>> for 
         let mut ret = Punctuated::new();
         ret.extend(i);
         ret
     }
 }
 
 impl<T, P> Extend<Pair<T, P>> for Punctuated<T, P> {
     fn extend<I: IntoIterator<Item = Pair<T, P>>>(&mut self, i: I) {
+        assert!(self.empty_or_trailing());
+        let mut nomore = false;
         for pair in i {
+            if nomore {
+                panic!("Punctuated extended with items after a Pair::End");
+            }
             match pair {
-                Pair::Punctuated(a, b) => self.inner.push((a, Some(b))),
-                Pair::End(a) => self.inner.push((a, None)),
+                Pair::Punctuated(a, b) => self.inner.push((a, b)),
+                Pair::End(a) => {
+                    self.last = Some(Box::new(a));
+                    nomore = true;
+                }
             }
         }
     }
 }
 
 impl<T, P> IntoIterator for Punctuated<T, P> {
     type Item = T;
     type IntoIter = IntoIter<T, P>;
 
     fn into_iter(self) -> Self::IntoIter {
         IntoIter {
             inner: self.inner.into_iter(),
+            last: self.last.map(|t| *t).into_iter(),
         }
     }
 }
 
 impl<'a, T, P> IntoIterator for &'a Punctuated<T, P> {
     type Item = &'a T;
     type IntoIter = Iter<'a, T>;
 
@@ -319,98 +341,130 @@ impl<T, P> Default for Punctuated<T, P> 
 }
 
 /// An iterator over borrowed pairs of type `Pair<&T, &P>`.
 ///
 /// Refer to the [module documentation] for details about punctuated sequences.
 ///
 /// [module documentation]: index.html
 pub struct Pairs<'a, T: 'a, P: 'a> {
-    inner: slice::Iter<'a, (T, Option<P>)>,
+    inner: slice::Iter<'a, (T, P)>,
+    last: option::IntoIter<&'a T>,
 }
 
 impl<'a, T, P> Iterator for Pairs<'a, T, P> {
     type Item = Pair<&'a T, &'a P>;
 
     fn next(&mut self) -> Option<Self::Item> {
-        self.inner.next().map(|pair| match pair.1 {
-            Some(ref p) => Pair::Punctuated(&pair.0, p),
-            None => Pair::End(&pair.0),
-        })
+        self.inner
+            .next()
+            .map(|&(ref t, ref p)| Pair::Punctuated(t, p))
+            .or_else(|| self.last.next().map(Pair::End))
+    }
+}
+
+impl<'a, T, P> ExactSizeIterator for Pairs<'a, T, P> {
+    fn len(&self) -> usize {
+        self.inner.len() + self.last.len()
     }
 }
 
 /// An iterator over mutably borrowed pairs of type `Pair<&mut T, &mut P>`.
 ///
 /// Refer to the [module documentation] for details about punctuated sequences.
 ///
 /// [module documentation]: index.html
 pub struct PairsMut<'a, T: 'a, P: 'a> {
-    inner: slice::IterMut<'a, (T, Option<P>)>,
+    inner: slice::IterMut<'a, (T, P)>,
+    last: option::IntoIter<&'a mut T>,
 }
 
 impl<'a, T, P> Iterator for PairsMut<'a, T, P> {
     type Item = Pair<&'a mut T, &'a mut P>;
 
     fn next(&mut self) -> Option<Self::Item> {
-        self.inner.next().map(|pair| match pair.1 {
-            Some(ref mut p) => Pair::Punctuated(&mut pair.0, p),
-            None => Pair::End(&mut pair.0),
-        })
+        self.inner
+            .next()
+            .map(|&mut (ref mut t, ref mut p)| Pair::Punctuated(t, p))
+            .or_else(|| self.last.next().map(Pair::End))
+    }
+}
+
+impl<'a, T, P> ExactSizeIterator for PairsMut<'a, T, P> {
+    fn len(&self) -> usize {
+        self.inner.len() + self.last.len()
     }
 }
 
 /// An iterator over owned pairs of type `Pair<T, P>`.
 ///
 /// Refer to the [module documentation] for details about punctuated sequences.
 ///
 /// [module documentation]: index.html
 pub struct IntoPairs<T, P> {
-    inner: vec::IntoIter<(T, Option<P>)>,
+    inner: vec::IntoIter<(T, P)>,
+    last: option::IntoIter<T>,
 }
 
 impl<T, P> Iterator for IntoPairs<T, P> {
     type Item = Pair<T, P>;
 
     fn next(&mut self) -> Option<Self::Item> {
-        self.inner.next().map(|pair| match pair.1 {
-            Some(p) => Pair::Punctuated(pair.0, p),
-            None => Pair::End(pair.0),
-        })
+        self.inner
+            .next()
+            .map(|(t, p)| Pair::Punctuated(t, p))
+            .or_else(|| self.last.next().map(Pair::End))
+    }
+}
+
+impl<T, P> ExactSizeIterator for IntoPairs<T, P> {
+    fn len(&self) -> usize {
+        self.inner.len() + self.last.len()
     }
 }
 
 /// An iterator over owned values of type `T`.
 ///
 /// Refer to the [module documentation] for details about punctuated sequences.
 ///
 /// [module documentation]: index.html
 pub struct IntoIter<T, P> {
-    inner: vec::IntoIter<(T, Option<P>)>,
+    inner: vec::IntoIter<(T, P)>,
+    last: option::IntoIter<T>,
 }
 
 impl<T, P> Iterator for IntoIter<T, P> {
     type Item = T;
 
     fn next(&mut self) -> Option<Self::Item> {
-        self.inner.next().map(|pair| pair.0)
+        self.inner
+            .next()
+            .map(|pair| pair.0)
+            .or_else(|| self.last.next())
+    }
+}
+
+impl<T, P> ExactSizeIterator for IntoIter<T, P> {
+    fn len(&self) -> usize {
+        self.inner.len() + self.last.len()
     }
 }
 
 /// An iterator over borrowed values of type `&T`.
 ///
 /// Refer to the [module documentation] for details about punctuated sequences.
 ///
 /// [module documentation]: index.html
 pub struct Iter<'a, T: 'a> {
-    inner: Box<Iterator<Item = &'a T> + 'a>,
+    inner: Box<ExactSizeIterator<Item = &'a T> + 'a>,
 }
 
 struct PrivateIter<'a, T: 'a, P: 'a> {
-    inner: slice::Iter<'a, (T, Option<P>)>,
+    inner: slice::Iter<'a, (T, P)>,
+    last: option::IntoIter<&'a T>,
 }
 
 #[cfg(any(feature = "full", feature = "derive"))]
 impl<'a, T> Iter<'a, T> {
     // Not public API.
     #[doc(hidden)]
     pub fn private_empty() -> Self {
         Iter {
@@ -422,50 +476,81 @@ impl<'a, T> Iter<'a, T> {
 impl<'a, T> Iterator for Iter<'a, T> {
     type Item = &'a T;
 
     fn next(&mut self) -> Option<Self::Item> {
         self.inner.next()
     }
 }
 
+impl<'a, T> ExactSizeIterator for Iter<'a, T> {
+    fn len(&self) -> usize {
+        self.inner.len()
+    }
+}
+
 impl<'a, T, P> Iterator for PrivateIter<'a, T, P> {
     type Item = &'a T;
 
     fn next(&mut self) -> Option<Self::Item> {
-        self.inner.next().map(|pair| &pair.0)
+        self.inner
+            .next()
+            .map(|pair| &pair.0)
+            .or_else(|| self.last.next())
+    }
+}
+
+impl<'a, T, P> ExactSizeIterator for PrivateIter<'a, T, P> {
+    fn len(&self) -> usize {
+        self.inner.len() + self.last.len()
     }
 }
 
 /// An iterator over mutably borrowed values of type `&mut T`.
 ///
 /// Refer to the [module documentation] for details about punctuated sequences.
 ///
 /// [module documentation]: index.html
 pub struct IterMut<'a, T: 'a> {
-    inner: Box<Iterator<Item = &'a mut T> + 'a>,
+    inner: Box<ExactSizeIterator<Item = &'a mut T> + 'a>,
 }
 
 struct PrivateIterMut<'a, T: 'a, P: 'a> {
-    inner: slice::IterMut<'a, (T, Option<P>)>,
+    inner: slice::IterMut<'a, (T, P)>,
+    last: option::IntoIter<&'a mut T>,
 }
 
 impl<'a, T> Iterator for IterMut<'a, T> {
     type Item = &'a mut T;
 
     fn next(&mut self) -> Option<Self::Item> {
         self.inner.next()
     }
 }
 
+impl<'a, T> ExactSizeIterator for IterMut<'a, T> {
+    fn len(&self) -> usize {
+        self.inner.len()
+    }
+}
+
 impl<'a, T, P> Iterator for PrivateIterMut<'a, T, P> {
     type Item = &'a mut T;
 
     fn next(&mut self) -> Option<Self::Item> {
-        self.inner.next().map(|pair| &mut pair.0)
+        self.inner
+            .next()
+            .map(|pair| &mut pair.0)
+            .or_else(|| self.last.next())
+    }
+}
+
+impl<'a, T, P> ExactSizeIterator for PrivateIterMut<'a, T, P> {
+    fn len(&self) -> usize {
+        self.inner.len() + self.last.len()
     }
 }
 
 /// A single syntax tree node of type `T` followed by its trailing punctuation
 /// of type `P` if any.
 ///
 /// Refer to the [module documentation] for details about punctuated sequences.
 ///
@@ -525,23 +610,37 @@ impl<T, P> Pair<T, P> {
         }
     }
 }
 
 impl<T, P> Index<usize> for Punctuated<T, P> {
     type Output = T;
 
     fn index(&self, index: usize) -> &Self::Output {
-        &self.inner[index].0
+        if index == self.len() - 1 {
+            match self.last {
+                Some(ref t) => t,
+                None => &self.inner[index].0,
+            }
+        } else {
+            &self.inner[index].0
+        }
     }
 }
 
 impl<T, P> IndexMut<usize> for Punctuated<T, P> {
     fn index_mut(&mut self, index: usize) -> &mut Self::Output {
-        &mut self.inner[index].0
+        if index == self.len() - 1 {
+            match self.last {
+                Some(ref mut t) => t,
+                None => &mut self.inner[index].0,
+            }
+        } else {
+            &mut self.inner[index].0
+        }
     }
 }
 
 #[cfg(feature = "parsing")]
 impl<T, P> Punctuated<T, P>
 where
     T: Synom,
     P: Synom,
@@ -574,20 +673,17 @@ where
 
 #[cfg(feature = "parsing")]
 impl<T, P> Punctuated<T, P>
 where
     P: Synom,
 {
     /// Parse **zero or more** syntax tree nodes using the given parser with
     /// punctuation in between and **no trailing** punctuation.
-    pub fn parse_separated_with(
-        input: Cursor,
-        parse: fn(Cursor) -> PResult<T>,
-    ) -> PResult<Self> {
+    pub fn parse_separated_with(input: Cursor, parse: fn(Cursor) -> PResult<T>) -> PResult<Self> {
         Self::parse(input, parse, false)
     }
 
     /// Parse **one or more** syntax tree nodes using the given parser with
     /// punctuation in between and **no trailing** punctuation.
     pub fn parse_separated_nonempty_with(
         input: Cursor,
         parse: fn(Cursor) -> PResult<T>,
@@ -595,20 +691,17 @@ where
         match Self::parse(input, parse, false) {
             Ok((ref b, _)) if b.is_empty() => parse_error(),
             other => other,
         }
     }
 
     /// Parse **zero or more** syntax tree nodes using the given parser with
     /// punctuation in between and **optional trailing** punctuation.
-    pub fn parse_terminated_with(
-        input: Cursor,
-        parse: fn(Cursor) -> PResult<T>,
-    ) -> PResult<Self> {
+    pub fn parse_terminated_with(input: Cursor, parse: fn(Cursor) -> PResult<T>) -> PResult<Self> {
         Self::parse(input, parse, true)
     }
 
     /// Parse **one or more** syntax tree nodes using the given parser with
     /// punctuation in between and **optional trailing** punctuation.
     pub fn parse_terminated_nonempty_with(
         input: Cursor,
         parse: fn(Cursor) -> PResult<T>,
@@ -664,34 +757,35 @@ where
             }
         }
     }
 }
 
 #[cfg(feature = "printing")]
 mod printing {
     use super::*;
-    use quote::{ToTokens, Tokens};
+    use proc_macro2::TokenStream;
+    use quote::{ToTokens, TokenStreamExt};
 
     impl<T, P> ToTokens for Punctuated<T, P>
     where
         T: ToTokens,
         P: ToTokens,
     {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             tokens.append_all(self.pairs())
         }
     }
 
     impl<T, P> ToTokens for Pair<T, P>
     where
         T: ToTokens,
         P: ToTokens,
     {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             match *self {
                 Pair::Punctuated(ref a, ref b) => {
                     a.to_tokens(tokens);
                     b.to_tokens(tokens);
                 }
                 Pair::End(ref a) => a.to_tokens(tokens),
             }
         }
--- a/third_party/rust/syn/src/spanned.rs
+++ b/third_party/rust/syn/src/spanned.rs
@@ -74,17 +74,17 @@
 //! 10 |     bad_field: *const i32,
 //!    |                ^^^^^^^^^^ `*const i32` cannot be shared between threads safely
 //! ```
 //!
 //! In this technique, using the `Type`'s span for the error message makes the
 //! error appear in the correct place underlining the right type.
 
 use proc_macro2::{Span, TokenStream};
-use quote::{ToTokens, Tokens};
+use quote::ToTokens;
 
 /// A trait that can provide the `Span` of the complete contents of a syntax
 /// tree node.
 ///
 /// This trait is automatically implemented for all types that implement
 /// [`ToTokens`] from the `quote` crate.
 ///
 /// [`ToTokens`]: https://docs.rs/quote/0.4/quote/trait.ToTokens.html
@@ -104,40 +104,38 @@ pub trait Spanned {
 }
 
 impl<T> Spanned for T
 where
     T: ToTokens,
 {
     #[cfg(procmacro2_semver_exempt)]
     fn span(&self) -> Span {
-        let mut tokens = Tokens::new();
+        let mut tokens = TokenStream::new();
         self.to_tokens(&mut tokens);
-        let token_stream = TokenStream::from(tokens);
-        let mut iter = token_stream.into_iter();
+        let mut iter = tokens.into_iter();
         let mut span = match iter.next() {
             Some(tt) => tt.span(),
             None => {
                 return Span::call_site();
             }
         };
         for tt in iter {
             if let Some(joined) = span.join(tt.span()) {
                 span = joined;
             }
         }
         span
     }
 
     #[cfg(not(procmacro2_semver_exempt))]
     fn span(&self) -> Span {
-        let mut tokens = Tokens::new();
+        let mut tokens = TokenStream::new();
         self.to_tokens(&mut tokens);
-        let token_stream = TokenStream::from(tokens);
-        let mut iter = token_stream.into_iter();
+        let mut iter = tokens.into_iter();
 
         // We can't join spans without procmacro2_semver_exempt so just grab the
         // first one.
         match iter.next() {
             Some(tt) => tt.span(),
             None => Span::call_site(),
         }
     }
--- a/third_party/rust/syn/src/synom.rs
+++ b/third_party/rust/syn/src/synom.rs
@@ -86,29 +86,29 @@
 //! # extern crate proc_macro2;
 //! # use proc_macro2::TokenStream;
 //! #
 //! use syn::synom::Parser;
 //! use syn::punctuated::Punctuated;
 //! use syn::{PathSegment, Expr, Attribute};
 //!
 //! # fn run_parsers() -> Result<(), syn::synom::ParseError> {
-//! #     let tokens = TokenStream::empty().into();
+//! #     let tokens = TokenStream::new().into();
 //! // Parse a nonempty sequence of path segments separated by `::` punctuation
 //! // with no trailing punctuation.
 //! let parser = Punctuated::<PathSegment, Token![::]>::parse_separated_nonempty;
 //! let path = parser.parse(tokens)?;
 //!
-//! #     let tokens = TokenStream::empty().into();
+//! #     let tokens = TokenStream::new().into();
 //! // Parse a possibly empty sequence of expressions terminated by commas with
 //! // an optional trailing punctuation.
 //! let parser = Punctuated::<Expr, Token![,]>::parse_terminated;
 //! let args = parser.parse(tokens)?;
 //!
-//! #     let tokens = TokenStream::empty().into();
+//! #     let tokens = TokenStream::new().into();
 //! // Parse zero or more outer attributes but not inner attributes.
 //! named!(outer_attrs -> Vec<Attribute>, many0!(Attribute::parse_outer));
 //! let attrs = outer_attrs.parse(tokens)?;
 //! #
 //! #     Ok(())
 //! # }
 //! #
 //! # fn main() {}
@@ -126,16 +126,17 @@
 //! example is provided for each one.
 //!
 //! - [`alt!`](../macro.alt.html)
 //! - [`braces!`](../macro.braces.html)
 //! - [`brackets!`](../macro.brackets.html)
 //! - [`call!`](../macro.call.html)
 //! - [`cond!`](../macro.cond.html)
 //! - [`cond_reduce!`](../macro.cond_reduce.html)
+//! - [`custom_keyword!`](../macro.custom_keyword.html)
 //! - [`do_parse!`](../macro.do_parse.html)
 //! - [`epsilon!`](../macro.epsilon.html)
 //! - [`input_end!`](../macro.input_end.html)
 //! - [`keyword!`](../macro.keyword.html)
 //! - [`many0!`](../macro.many0.html)
 //! - [`map!`](../macro.map.html)
 //! - [`not!`](../macro.not.html)
 //! - [`option!`](../macro.option.html)
@@ -146,18 +147,19 @@
 //! - [`syn!`](../macro.syn.html)
 //! - [`tuple!`](../macro.tuple.html)
 //! - [`value!`](../macro.value.html)
 //!
 //! *This module is available if Syn is built with the `"parsing"` feature.*
 
 #[cfg(feature = "proc-macro")]
 use proc_macro;
-use proc_macro2;
+use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, TokenStream, TokenTree};
 
+use error::parse_error;
 pub use error::{PResult, ParseError};
 
 use buffer::{Cursor, TokenBuffer};
 
 /// Parsing interface implemented by all types that can be parsed in a default
 /// way from a token stream.
 ///
 /// Refer to the [module documentation] for details about parsing in Syn.
@@ -197,40 +199,129 @@ pub trait Synom: Sized {
     ///     }
     /// }
     /// ```
     fn description() -> Option<&'static str> {
         None
     }
 }
 
-impl Synom for proc_macro2::TokenStream {
+impl Synom for TokenStream {
     fn parse(input: Cursor) -> PResult<Self> {
         Ok((input.token_stream(), Cursor::empty()))
     }
 
     fn description() -> Option<&'static str> {
         Some("arbitrary token stream")
     }
 }
 
+impl Synom for TokenTree {
+    fn parse(input: Cursor) -> PResult<Self> {
+        match input.token_tree() {
+            Some((tt, rest)) => Ok((tt, rest)),
+            None => parse_error(),
+        }
+    }
+
+    fn description() -> Option<&'static str> {
+        Some("token tree")
+    }
+}
+
+impl Synom for Group {
+    fn parse(input: Cursor) -> PResult<Self> {
+        for delim in &[Delimiter::Parenthesis, Delimiter::Brace, Delimiter::Bracket] {
+            match input.group(*delim) {
+                Some((inside, span, rest)) => {
+                    let mut group = Group::new(*delim, inside.token_stream());
+                    group.set_span(span);
+                    return Ok((group, rest));
+                }
+                None => {}
+            }
+        }
+        parse_error()
+    }
+
+    fn description() -> Option<&'static str> {
+        Some("group token")
+    }
+}
+
+impl Synom for Ident {
+    fn parse(input: Cursor) -> PResult<Self> {
+        let (ident, rest) = match input.ident() {
+            Some(ident) => ident,
+            _ => return parse_error(),
+        };
+        match &ident.to_string()[..] {
+			"_"
+			// From https://doc.rust-lang.org/grammar.html#keywords
+			| "abstract" | "alignof" | "as" | "become" | "box" | "break" | "const"
+			| "continue" | "crate" | "do" | "else" | "enum" | "extern" | "false" | "final"
+			| "fn" | "for" | "if" | "impl" | "in" | "let" | "loop" | "macro" | "match"
+			| "mod" | "move" | "mut" | "offsetof" | "override" | "priv" | "proc" | "pub"
+			| "pure" | "ref" | "return" | "Self" | "self" | "sizeof" | "static" | "struct"
+			| "super" | "trait" | "true" | "type" | "typeof" | "unsafe" | "unsized" | "use"
+			| "virtual" | "where" | "while" | "yield" => return parse_error(),
+			_ => {}
+		}
+
+        Ok((ident, rest))
+    }
+
+    fn description() -> Option<&'static str> {
+        Some("identifier")
+    }
+}
+
+impl Synom for Punct {
+    fn parse(input: Cursor) -> PResult<Self> {
+        match input.punct() {
+            Some((punct, rest)) => Ok((punct, rest)),
+            None => parse_error(),
+        }
+    }
+
+    fn description() -> Option<&'static str> {
+        Some("punctuation token")
+    }
+}
+
+impl Synom for Literal {
+    fn parse(input: Cursor) -> PResult<Self> {
+        match input.literal() {
+            Some((literal, rest)) => Ok((literal, rest)),
+            None => parse_error(),
+        }
+    }
+
+    fn description() -> Option<&'static str> {
+        Some("literal token")
+    }
+}
+
 /// Parser that can parse Rust tokens into a particular syntax tree node.
 ///
 /// Refer to the [module documentation] for details about parsing in Syn.
 ///
 /// [module documentation]: index.html
 ///
 /// *This trait is available if Syn is built with the `"parsing"` feature.*
 pub trait Parser: Sized {
     type Output;
 
     /// Parse a proc-macro2 token stream into the chosen syntax tree node.
-    fn parse2(self, tokens: proc_macro2::TokenStream) -> Result<Self::Output, ParseError>;
+    fn parse2(self, tokens: TokenStream) -> Result<Self::Output, ParseError>;
 
     /// Parse tokens of source code into the chosen syntax tree node.
+    ///
+    /// *This method is available if Syn is built with both the `"parsing"` and
+    /// `"proc-macro"` features.*
     #[cfg(feature = "proc-macro")]
     fn parse(self, tokens: proc_macro::TokenStream) -> Result<Self::Output, ParseError> {
         self.parse2(tokens.into())
     }
 
     /// Parse a string of Rust code into the chosen syntax tree node.
     ///
     /// # Hygiene
@@ -240,24 +331,85 @@ pub trait Parser: Sized {
     fn parse_str(self, s: &str) -> Result<Self::Output, ParseError> {
         match s.parse() {
             Ok(tts) => self.parse2(tts),
             Err(_) => Err(ParseError::new("error while lexing input string")),
         }
     }
 }
 
-impl<F, T> Parser for F where F: FnOnce(Cursor) -> PResult<T> {
+impl<F, T> Parser for F
+where
+    F: FnOnce(Cursor) -> PResult<T>,
+{
     type Output = T;
 
-    fn parse2(self, tokens: proc_macro2::TokenStream) -> Result<T, ParseError> {
+    fn parse2(self, tokens: TokenStream) -> Result<T, ParseError> {
         let buf = TokenBuffer::new2(tokens);
         let (t, rest) = self(buf.begin())?;
         if rest.eof() {
             Ok(t)
         } else if rest == buf.begin() {
             // parsed nothing
             Err(ParseError::new("failed to parse anything"))
         } else {
             Err(ParseError::new("failed to parse all tokens"))
         }
     }
 }
+
+/// Extension traits that are made available within the `call!` parser.
+///
+/// *This module is available if Syn is built with the `"parsing"` feature.*
+pub mod ext {
+    use super::*;
+    use proc_macro2::Ident;
+
+    /// Additional parsing methods for `Ident`.
+    ///
+    /// This trait is sealed and cannot be implemented for types outside of Syn.
+    ///
+    /// *This trait is available if Syn is built with the `"parsing"` feature.*
+    pub trait IdentExt: Sized + private::Sealed {
+        /// Parses any identifier including keywords.
+        ///
+        /// This is useful when parsing a DSL which allows Rust keywords as
+        /// identifiers.
+        ///
+        /// ```rust
+        /// #[macro_use]
+        /// extern crate syn;
+        ///
+        /// use syn::Ident;
+        ///
+        /// // Parses input that looks like `name = NAME` where `NAME` can be
+        /// // any identifier.
+        /// //
+        /// // Examples:
+        /// //
+        /// //     name = anything
+        /// //     name = impl
+        /// named!(parse_dsl -> Ident, do_parse!(
+        ///     custom_keyword!(name) >>
+        ///     punct!(=) >>
+        ///     name: call!(Ident::parse_any) >>
+        ///     (name)
+        /// ));
+        /// #
+        /// # fn main() {}
+        /// ```
+        fn parse_any(input: Cursor) -> PResult<Self>;
+    }
+
+    impl IdentExt for Ident {
+        fn parse_any(input: Cursor) -> PResult<Self> {
+            input.ident().map_or_else(parse_error, Ok)
+        }
+    }
+
+    mod private {
+        use proc_macro2::Ident;
+
+        pub trait Sealed {}
+
+        impl Sealed for Ident {}
+    }
+}
--- a/third_party/rust/syn/src/token.rs
+++ b/third_party/rust/syn/src/token.rs
@@ -92,38 +92,39 @@
 //!             ty: Box::new(ty), eq_token, expr: Box::new(expr), semi_token,
 //!         })
 //!     ));
 //! }
 //! #
 //! # fn main() {}
 //! ```
 
-use proc_macro2::Span;
+use proc_macro2::{Ident, Span};
 
 macro_rules! tokens {
     (
         punct: {
             $($punct:tt pub struct $punct_name:ident/$len:tt #[$punct_doc:meta])*
         }
         delimiter: {
             $($delimiter:tt pub struct $delimiter_name:ident #[$delimiter_doc:meta])*
         }
         keyword: {
             $($keyword:tt pub struct $keyword_name:ident #[$keyword_doc:meta])*
         }
     ) => (
-        $(token_punct! { #[$punct_doc] $punct pub struct $punct_name/$len })*
+        $(token_punct_def! { #[$punct_doc] pub struct $punct_name/$len })*
+        $(token_punct_parser! { $punct pub struct $punct_name })*
         $(token_delimiter! { #[$delimiter_doc] $delimiter pub struct $delimiter_name })*
         $(token_keyword! { #[$keyword_doc] $keyword pub struct $keyword_name })*
     )
 }
 
-macro_rules! token_punct {
-    (#[$doc:meta] $s:tt pub struct $name:ident/$len:tt) => {
+macro_rules! token_punct_def {
+    (#[$doc:meta]pub struct $name:ident / $len:tt) => {
         #[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
         #[$doc]
         ///
         /// Don't try to remember the name of this type -- use the [`Token!`]
         /// macro instead.
         ///
         /// [`Token!`]: index.html
         pub struct $name(pub [Span; $len]);
@@ -155,44 +156,50 @@ macro_rules! token_punct {
             fn eq(&self, _other: &$name) -> bool {
                 true
             }
         }
 
         #[cfg(feature = "extra-traits")]
         impl ::std::hash::Hash for $name {
             fn hash<H>(&self, _state: &mut H)
-                where H: ::std::hash::Hasher
-            {}
+            where
+                H: ::std::hash::Hasher,
+            {
+            }
         }
 
+        impl From<Span> for $name {
+            fn from(span: Span) -> Self {
+                $name([span; $len])
+            }
+        }
+    };
+}
+
+macro_rules! token_punct_parser {
+    ($s:tt pub struct $name:ident) => {
         #[cfg(feature = "printing")]
         impl ::quote::ToTokens for $name {
-            fn to_tokens(&self, tokens: &mut ::quote::Tokens) {
+            fn to_tokens(&self, tokens: &mut ::proc_macro2::TokenStream) {
                 printing::punct($s, &self.0, tokens);
             }
         }
 
         #[cfg(feature = "parsing")]
         impl ::Synom for $name {
             fn parse(tokens: $crate::buffer::Cursor) -> $crate::synom::PResult<$name> {
                 parsing::punct($s, tokens, $name)
             }
 
             fn description() -> Option<&'static str> {
                 Some(concat!("`", $s, "`"))
             }
         }
-
-        impl From<Span> for $name {
-            fn from(span: Span) -> Self {
-                $name([span; $len])
-            }
-        }
-    }
+    };
 }
 
 macro_rules! token_keyword {
     (#[$doc:meta] $s:tt pub struct $name:ident) => {
         #[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
         #[$doc]
         ///
         /// Don't try to remember the name of this type -- use the [`Token!`]
@@ -222,23 +229,25 @@ macro_rules! token_keyword {
             fn eq(&self, _other: &$name) -> bool {
                 true
             }
         }
 
         #[cfg(feature = "extra-traits")]
         impl ::std::hash::Hash for $name {
             fn hash<H>(&self, _state: &mut H)
-                where H: ::std::hash::Hasher
-            {}
+            where
+                H: ::std::hash::Hasher,
+            {
+            }
         }
 
         #[cfg(feature = "printing")]
         impl ::quote::ToTokens for $name {
-            fn to_tokens(&self, tokens: &mut ::quote::Tokens) {
+            fn to_tokens(&self, tokens: &mut ::proc_macro2::TokenStream) {
                 printing::keyword($s, &self.0, tokens);
             }
         }
 
         #[cfg(feature = "parsing")]
         impl ::Synom for $name {
             fn parse(tokens: $crate::buffer::Cursor) -> $crate::synom::PResult<$name> {
                 parsing::keyword($s, tokens, $name)
@@ -249,17 +258,17 @@ macro_rules! token_keyword {
             }
         }
 
         impl From<Span> for $name {
             fn from(span: Span) -> Self {
                 $name(span)
             }
         }
-    }
+    };
 }
 
 macro_rules! token_delimiter {
     (#[$doc:meta] $s:tt pub struct $name:ident) => {
         #[cfg_attr(feature = "clone-impls", derive(Copy, Clone))]
         #[$doc]
         pub struct $name(pub Span);
 
@@ -284,43 +293,124 @@ macro_rules! token_delimiter {
             fn eq(&self, _other: &$name) -> bool {
                 true
             }
         }
 
         #[cfg(feature = "extra-traits")]
         impl ::std::hash::Hash for $name {
             fn hash<H>(&self, _state: &mut H)
-                where H: ::std::hash::Hasher
-            {}
+            where
+                H: ::std::hash::Hasher,
+            {
+            }
         }
 
         impl $name {
             #[cfg(feature = "printing")]
-            pub fn surround<F>(&self,
-                               tokens: &mut ::quote::Tokens,
-                               f: F)
-                where F: FnOnce(&mut ::quote::Tokens)
+            pub fn surround<F>(&self, tokens: &mut ::proc_macro2::TokenStream, f: F)
+            where
+                F: FnOnce(&mut ::proc_macro2::TokenStream),
             {
                 printing::delim($s, &self.0, tokens, f);
             }
 
             #[cfg(feature = "parsing")]
-            pub fn parse<F, R>(tokens: $crate::buffer::Cursor, f: F) -> $crate::synom::PResult<($name, R)>
-                where F: FnOnce($crate::buffer::Cursor) -> $crate::synom::PResult<R>
+            pub fn parse<F, R>(
+                tokens: $crate::buffer::Cursor,
+                f: F,
+            ) -> $crate::synom::PResult<($name, R)>
+            where
+                F: FnOnce($crate::buffer::Cursor) -> $crate::synom::PResult<R>,
             {
                 parsing::delim($s, tokens, $name, f)
             }
         }
 
         impl From<Span> for $name {
             fn from(span: Span) -> Self {
                 $name(span)
             }
         }
+    };
+}
+
+token_punct_def! {
+    /// `_`
+    pub struct Underscore/1
+}
+
+#[cfg(feature = "printing")]
+impl ::quote::ToTokens for Underscore {
+    fn to_tokens(&self, tokens: &mut ::proc_macro2::TokenStream) {
+        use quote::TokenStreamExt;
+        tokens.append(::proc_macro2::Ident::new("_", self.0[0]));
+    }
+}
+
+#[cfg(feature = "parsing")]
+impl ::Synom for Underscore {
+    fn parse(input: ::buffer::Cursor) -> ::synom::PResult<Underscore> {
+        match input.ident() {
+            Some((ident, rest)) => {
+                if ident == "_" {
+                    Ok((Underscore([ident.span()]), rest))
+                } else {
+                    ::parse_error()
+                }
+            }
+            None => parsing::punct("_", input, Underscore),
+        }
+    }
+
+    fn description() -> Option<&'static str> {
+        Some("`_`")
+    }
+}
+
+token_punct_def! {
+    /// `'`
+    pub struct Apostrophe/1
+}
+
+// Implement Clone anyway because it is required for cloning Lifetime.
+#[cfg(not(feature = "clone-impls"))]
+impl Clone for Apostrophe {
+    fn clone(&self) -> Self {
+        Apostrophe(self.0)
+    }
+}
+
+#[cfg(feature = "printing")]
+impl ::quote::ToTokens for Apostrophe {
+    fn to_tokens(&self, tokens: &mut ::proc_macro2::TokenStream) {
+        use quote::TokenStreamExt;
+        let mut token = ::proc_macro2::Punct::new('\'', ::proc_macro2::Spacing::Joint);
+        token.set_span(self.0[0]);
+        tokens.append(token);
+    }
+}
+
+#[cfg(feature = "parsing")]
+impl ::Synom for Apostrophe {
+    fn parse(input: ::buffer::Cursor) -> ::synom::PResult<Apostrophe> {
+        match input.punct() {
+            Some((op, rest)) => {
+                if op.as_char() == '\'' && op.spacing() == ::proc_macro2::Spacing::Joint {
+                    Ok((Apostrophe([op.span()]), rest))
+                } else {
+                    ::parse_error()
+                }
+            }
+            None => ::parse_error(),
+        }
+    }
+
+    fn description() -> Option<&'static str> {
+        Some("`'`")
     }
 }
 
 tokens! {
     punct: {
         "+"        pub struct Add/1        /// `+`
         "+="       pub struct AddEq/2      /// `+=`
         "&"        pub struct And/1        /// `&`
@@ -330,16 +420,17 @@ tokens! {
         "!"        pub struct Bang/1       /// `!`
         "^"        pub struct Caret/1      /// `^`
         "^="       pub struct CaretEq/2    /// `^=`
         ":"        pub struct Colon/1      /// `:`
         "::"       pub struct Colon2/2     /// `::`
         ","        pub struct Comma/1      /// `,`
         "/"        pub struct Div/1        /// `/`
         "/="       pub struct DivEq/2      /// `/=`
+        "$"        pub struct Dollar/1     /// `$`
         "."        pub struct Dot/1        /// `.`
         ".."       pub struct Dot2/2       /// `..`
         "..."      pub struct Dot3/3       /// `...`
         "..="      pub struct DotDotEq/3   /// `..=`
         "="        pub struct Eq/1         /// `=`
         "=="       pub struct EqEq/2       /// `==`
         ">="       pub struct Ge/2         /// `>=`
         ">"        pub struct Gt/1         /// `>`
@@ -360,17 +451,16 @@ tokens! {
         ";"        pub struct Semi/1       /// `;`
         "<<"       pub struct Shl/2        /// `<<`
         "<<="      pub struct ShlEq/3      /// `<<=`
         ">>"       pub struct Shr/2        /// `>>`
         ">>="      pub struct ShrEq/3      /// `>>=`
         "*"        pub struct Star/1       /// `*`
         "-"        pub struct Sub/1        /// `-`
         "-="       pub struct SubEq/2      /// `-=`
-        "_"        pub struct Underscore/1 /// `_`
     }
     delimiter: {
         "{"        pub struct Brace        /// `{...}`
         "["        pub struct Bracket      /// `[...]`
         "("        pub struct Paren        /// `(...)`
         " "        pub struct Group        /// None-delimited group
     }
     keyword: {
@@ -423,16 +513,17 @@ tokens! {
 /// given token.
 ///
 /// See the [token module] documentation for details and examples.
 ///
 /// [token module]: token/index.html
 // Unfortunate duplication due to a rustdoc bug.
 // https://github.com/rust-lang/rust/issues/45939
 #[macro_export]
+#[cfg_attr(rustfmt, rustfmt_skip)]
 macro_rules! Token {
     (+)        => { $crate::token::Add };
     (+=)       => { $crate::token::AddEq };
     (&)        => { $crate::token::And };
     (&&)       => { $crate::token::AndAnd };
     (&=)       => { $crate::token::AndEq };
     (@)        => { $crate::token::At };
     (!)        => { $crate::token::Bang };
@@ -522,16 +613,17 @@ macro_rules! Token {
 ///
 /// See the [token module] documentation for details and examples.
 ///
 /// [token module]: token/index.html
 ///
 /// *This macro is available if Syn is built with the `"parsing"` feature.*
 #[cfg(feature = "parsing")]
 #[macro_export]
+#[cfg_attr(rustfmt, rustfmt_skip)]
 macro_rules! punct {
     ($i:expr, +)   => { call!($i, <$crate::token::Add as $crate::synom::Synom>::parse) };
     ($i:expr, +=)  => { call!($i, <$crate::token::AddEq as $crate::synom::Synom>::parse) };
     ($i:expr, &)   => { call!($i, <$crate::token::And as $crate::synom::Synom>::parse) };
     ($i:expr, &&)  => { call!($i, <$crate::token::AndAnd as $crate::synom::Synom>::parse) };
     ($i:expr, &=)  => { call!($i, <$crate::token::AndEq as $crate::synom::Synom>::parse) };
     ($i:expr, @)   => { call!($i, <$crate::token::At as $crate::synom::Synom>::parse) };
     ($i:expr, !)   => { call!($i, <$crate::token::Bang as $crate::synom::Synom>::parse) };
@@ -579,16 +671,17 @@ macro_rules! punct {
 ///
 /// See the [token module] documentation for details and examples.
 ///
 /// [token module]: token/index.html
 ///
 /// *This macro is available if Syn is built with the `"parsing"` feature.*
 #[cfg(feature = "parsing")]
 #[macro_export]
+#[cfg_attr(rustfmt, rustfmt_skip)]
 macro_rules! keyword {
     ($i:expr, as)       => { call!($i, <$crate::token::As as $crate::synom::Synom>::parse) };
     ($i:expr, auto)     => { call!($i, <$crate::token::Auto as $crate::synom::Synom>::parse) };
     ($i:expr, box)      => { call!($i, <$crate::token::Box as $crate::synom::Synom>::parse) };
     ($i:expr, break)    => { call!($i, <$crate::token::Break as $crate::synom::Synom>::parse) };
     ($i:expr, Self)     => { call!($i, <$crate::token::CapSelf as $crate::synom::Synom>::parse) };
     ($i:expr, catch)    => { call!($i, <$crate::token::Catch as $crate::synom::Synom>::parse) };
     ($i:expr, const)    => { call!($i, <$crate::token::Const as $crate::synom::Synom>::parse) };
@@ -624,16 +717,31 @@ macro_rules! keyword {
     ($i:expr, union)    => { call!($i, <$crate::token::Union as $crate::synom::Synom>::parse) };
     ($i:expr, unsafe)   => { call!($i, <$crate::token::Unsafe as $crate::synom::Synom>::parse) };
     ($i:expr, use)      => { call!($i, <$crate::token::Use as $crate::synom::Synom>::parse) };
     ($i:expr, where)    => { call!($i, <$crate::token::Where as $crate::synom::Synom>::parse) };
     ($i:expr, while)    => { call!($i, <$crate::token::While as $crate::synom::Synom>::parse) };
     ($i:expr, yield)    => { call!($i, <$crate::token::Yield as $crate::synom::Synom>::parse) };
 }
 
+macro_rules! ident_from_token {
+    ($token:ident) => {
+        impl From<Token![$token]> for Ident {
+            fn from(token: Token![$token]) -> Ident {
+                Ident::new(stringify!($token), token.0)
+            }
+        }
+    };
+}
+
+ident_from_token!(self);
+ident_from_token!(Self);
+ident_from_token!(super);
+ident_from_token!(crate);
+
 #[cfg(feature = "parsing")]
 mod parsing {
     use proc_macro2::{Delimiter, Spacing, Span};
 
     use buffer::Cursor;
     use parse_error;
     use synom::PResult;
 
@@ -663,37 +771,41 @@ mod parsing {
     where
         T: FromSpans,
     {
         let mut spans = [Span::call_site(); 3];
         assert!(s.len() <= spans.len());
         let chars = s.chars();
 
         for (i, (ch, slot)) in chars.zip(&mut spans).enumerate() {
-            match tokens.op() {
-                Some((op, rest)) if op.op() == ch => {
-                    if i != s.len() - 1 {
-                        match op.spacing() {
-                            Spacing::Joint => {}
-                            _ => return parse_error(),
+            match tokens.punct() {
+                Some((op, rest)) => {
+                    if op.as_char() == ch {
+                        if i != s.len() - 1 {
+                            match op.spacing() {
+                                Spacing::Joint => {}
+                                _ => return parse_error(),
+                            }
                         }
+                        *slot = op.span();
+                        tokens = rest;
+                    } else {
+                        return parse_error();
                     }
-                    *slot = op.span();
-                    tokens = rest;
                 }
                 _ => return parse_error(),
             }
         }
         Ok((new(T::from_spans(&spans)), tokens))
     }
 
     pub fn keyword<'a, T>(keyword: &str, tokens: Cursor<'a>, new: fn(Span) -> T) -> PResult<'a, T> {
-        if let Some((term, rest)) = tokens.term() {
-            if term.as_str() == keyword {
-                return Ok((new(term.span()), rest));
+        if let Some((ident, rest)) = tokens.ident() {
+            if ident == keyword {
+                return Ok((new(ident.span()), rest));
             }
         }
         parse_error()
     }
 
     pub fn delim<'a, F, R, T>(
         delim: &str,
         tokens: Cursor<'a>,
@@ -723,51 +835,51 @@ mod parsing {
             }
         }
         parse_error()
     }
 }
 
 #[cfg(feature = "printing")]
 mod printing {
-    use proc_macro2::{Delimiter, Spacing, Span, Term, Op, Group};
-    use quote::Tokens;
+    use proc_macro2::{Delimiter, Group, Ident, Punct, Spacing, Span, TokenStream};
+    use quote::TokenStreamExt;
 
-    pub fn punct(s: &str, spans: &[Span], tokens: &mut Tokens) {
+    pub fn punct(s: &str, spans: &[Span], tokens: &mut TokenStream) {
         assert_eq!(s.len(), spans.len());
 
         let mut chars = s.chars();
         let mut spans = spans.iter();
         let ch = chars.next_back().unwrap();
         let span = spans.next_back().unwrap();
         for (ch, span) in chars.zip(spans) {
-            let mut op = Op::new(ch, Spacing::Joint);
+            let mut op = Punct::new(ch, Spacing::Joint);
             op.set_span(*span);
             tokens.append(op);
         }
 
-        let mut op = Op::new(ch, Spacing::Alone);
+        let mut op = Punct::new(ch, Spacing::Alone);
         op.set_span(*span);
         tokens.append(op);
     }
 
-    pub fn keyword(s: &str, span: &Span, tokens: &mut Tokens) {
-        tokens.append(Term::new(s, *span));
+    pub fn keyword(s: &str, span: &Span, tokens: &mut TokenStream) {
+        tokens.append(Ident::new(s, *span));
     }
 
-    pub fn delim<F>(s: &str, span: &Span, tokens: &mut Tokens, f: F)
+    pub fn delim<F>(s: &str, span: &Span, tokens: &mut TokenStream, f: F)
     where
-        F: FnOnce(&mut Tokens),
+        F: FnOnce(&mut TokenStream),
     {
         let delim = match s {
             "(" => Delimiter::Parenthesis,
             "[" => Delimiter::Bracket,
             "{" => Delimiter::Brace,
             " " => Delimiter::None,
             _ => panic!("unknown delimiter: {}", s),
         };
-        let mut inner = Tokens::new();
+        let mut inner = TokenStream::new();
         f(&mut inner);
-        let mut g = Group::new(delim, inner.into());
+        let mut g = Group::new(delim, inner);
         g.set_span(*span);
         tokens.append(g);
     }
 }
--- a/third_party/rust/syn/src/tt.rs
+++ b/third_party/rust/syn/src/tt.rs
@@ -27,36 +27,36 @@ pub fn delimited(input: Cursor) -> PResu
         let span = g.span();
         let delimiter = match g.delimiter() {
             Delimiter::Parenthesis => MacroDelimiter::Paren(Paren(span)),
             Delimiter::Brace => MacroDelimiter::Brace(Brace(span)),
             Delimiter::Bracket => MacroDelimiter::Bracket(Bracket(span)),
             Delimiter::None => return parse_error(),
         };
 
-        return Ok(((delimiter, g.stream().clone()), rest))
+        return Ok(((delimiter, g.stream().clone()), rest));
     }
     parse_error()
 }
 
 #[cfg(all(feature = "full", feature = "parsing"))]
 pub fn braced(input: Cursor) -> PResult<(Brace, TokenStream)> {
     if let Some((TokenTree::Group(g), rest)) = input.token_tree() {
         if g.delimiter() == Delimiter::Brace {
-            return Ok(((Brace(g.span()), g.stream().clone()), rest))
+            return Ok(((Brace(g.span()), g.stream().clone()), rest));
         }
     }
     parse_error()
 }
 
 #[cfg(all(feature = "full", feature = "parsing"))]
 pub fn parenthesized(input: Cursor) -> PResult<(Paren, TokenStream)> {
     if let Some((TokenTree::Group(g), rest)) = input.token_tree() {
         if g.delimiter() == Delimiter::Parenthesis {
-            return Ok(((Paren(g.span()), g.stream().clone()), rest))
+            return Ok(((Paren(g.span()), g.stream().clone()), rest));
         }
     }
     parse_error()
 }
 
 #[cfg(feature = "extra-traits")]
 pub struct TokenTreeHelper<'a>(pub &'a TokenTree);
 
@@ -84,26 +84,26 @@ impl<'a> PartialEq for TokenTreeHelper<'
                         None => return false,
                     };
                     if TokenTreeHelper(&item1) != TokenTreeHelper(&item2) {
                         return false;
                     }
                 }
                 s2.next().is_none()
             }
-            (&TokenTree::Op(ref o1), &TokenTree::Op(ref o2)) => {
-                o1.op() == o2.op() && match (o1.spacing(), o2.spacing()) {
+            (&TokenTree::Punct(ref o1), &TokenTree::Punct(ref o2)) => {
+                o1.as_char() == o2.as_char() && match (o1.spacing(), o2.spacing()) {
                     (Spacing::Alone, Spacing::Alone) | (Spacing::Joint, Spacing::Joint) => true,
                     _ => false,
                 }
             }
             (&TokenTree::Literal(ref l1), &TokenTree::Literal(ref l2)) => {
                 l1.to_string() == l2.to_string()
             }
-            (&TokenTree::Term(ref s1), &TokenTree::Term(ref s2)) => s1.as_str() == s2.as_str(),
+            (&TokenTree::Ident(ref s1), &TokenTree::Ident(ref s2)) => s1 == s2,
             _ => false,
         }
     }
 }
 
 #[cfg(feature = "extra-traits")]
 impl<'a> Hash for TokenTreeHelper<'a> {
     fn hash<H: Hasher>(&self, h: &mut H) {
@@ -119,26 +119,26 @@ impl<'a> Hash for TokenTreeHelper<'a> {
                     Delimiter::None => 3u8.hash(h),
                 }
 
                 for item in g.stream().clone() {
                     TokenTreeHelper(&item).hash(h);
                 }
                 0xffu8.hash(h); // terminator w/ a variant we don't normally hash
             }
-            TokenTree::Op(ref op) => {
+            TokenTree::Punct(ref op) => {
                 1u8.hash(h);
-                op.op().hash(h);
+                op.as_char().hash(h);
                 match op.spacing() {
                     Spacing::Alone => 0u8.hash(h),
                     Spacing::Joint => 1u8.hash(h),
                 }
             }
             TokenTree::Literal(ref lit) => (2u8, lit.to_string()).hash(h),
-            TokenTree::Term(ref word) => (3u8, word.as_str()).hash(h),
+            TokenTree::Ident(ref word) => (3u8, word).hash(h),
         }
     }
 }
 
 #[cfg(feature = "extra-traits")]
 pub struct TokenStreamHelper<'a>(pub &'a TokenStream);
 
 #[cfg(feature = "extra-traits")]
--- a/third_party/rust/syn/src/ty.rs
+++ b/third_party/rust/syn/src/ty.rs
@@ -1,19 +1,19 @@
 // Copyright 2018 Syn Developers
 //
 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use punctuated::Punctuated;
 use super::*;
 use proc_macro2::TokenStream;
+use punctuated::Punctuated;
 #[cfg(feature = "extra-traits")]
 use std::hash::{Hash, Hasher};
 #[cfg(feature = "extra-traits")]
 use tt::TokenStreamHelper;
 
 ast_enum_of_structs! {
     /// The possible types that a Rust value could have.
     ///
@@ -244,18 +244,18 @@ ast_enum! {
         /// A particular type is returned.
         Type(Token![->], Box<Type>),
     }
 }
 
 #[cfg(feature = "parsing")]
 pub mod parsing {
     use super::*;
+    use path::parsing::qpath;
     use synom::Synom;
-    use path::parsing::qpath;
 
     impl Synom for Type {
         named!(parse -> Self, call!(ambig_ty, true));
 
         fn description() -> Option<&'static str> {
             Some("type")
         }
     }
@@ -389,17 +389,17 @@ pub mod parsing {
             unsafety: option!(keyword!(unsafe)) >>
             abi: option!(syn!(Abi)) >>
             fn_: keyword!(fn) >>
             parens: parens!(do_parse!(
                 inputs: call!(Punctuated::parse_terminated) >>
                 variadic: option!(cond_reduce!(inputs.empty_or_trailing(), punct!(...))) >>
                 (inputs, variadic)
             )) >>
-            output: syn!(ReturnType) >>
+            output: call!(ReturnType::without_plus) >>
             (TypeBareFn {
                 unsafety: unsafety,
                 abi: abi,
                 lifetimes: lifetimes,
                 output: output,
                 variadic: (parens.1).1,
                 fn_token: fn_,
                 paren_token: parens.0,
@@ -478,26 +478,31 @@ pub mod parsing {
                     let parenthesized = PathArguments::Parenthesized(parenthesized);
                     path.segments.last_mut().unwrap().value_mut().arguments = parenthesized;
                 }
                 TypePath { qself: qself, path: path }
             })
         ));
     }
 
-    impl Synom for ReturnType {
-        named!(parse -> Self, alt!(
+    impl ReturnType {
+        named!(pub without_plus -> Self, call!(Self::parse, false));
+        named!(parse(allow_plus: bool) -> Self, alt!(
             do_parse!(
                 arrow: punct!(->) >>
-                ty: syn!(Type) >>
+                ty: call!(ambig_ty, allow_plus) >>
                 (ReturnType::Type(arrow, Box::new(ty)))
             )
             |
             epsilon!() => { |_| ReturnType::Default }
         ));
+    }
+
+    impl Synom for ReturnType {
+        named!(parse -> Self, call!(Self::parse, true));
 
         fn description() -> Option<&'static str> {
             Some("return type")
         }
     }
 
     impl Synom for TypeTraitObject {
         named!(parse -> Self, call!(Self::parse, true));
@@ -636,60 +641,61 @@ pub mod parsing {
             Some("`extern` ABI qualifier")
         }
     }
 }
 
 #[cfg(feature = "printing")]
 mod printing {
     use super::*;
-    use quote::{ToTokens, Tokens};
+    use proc_macro2::TokenStream;
+    use quote::ToTokens;
 
     impl ToTokens for TypeSlice {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.bracket_token.surround(tokens, |tokens| {
                 self.elem.to_tokens(tokens);
             });
         }
     }
 
     impl ToTokens for TypeArray {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.bracket_token.surround(tokens, |tokens| {
                 self.elem.to_tokens(tokens);
                 self.semi_token.to_tokens(tokens);
                 self.len.to_tokens(tokens);
             });
         }
     }
 
     impl ToTokens for TypePtr {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.star_token.to_tokens(tokens);
             match self.mutability {
                 Some(ref tok) => tok.to_tokens(tokens),
                 None => {
                     TokensOrDefault(&self.const_token).to_tokens(tokens);
                 }
             }
             self.elem.to_tokens(tokens);
         }
     }
 
     impl ToTokens for TypeReference {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.and_token.to_tokens(tokens);
             self.lifetime.to_tokens(tokens);
             self.mutability.to_tokens(tokens);
             self.elem.to_tokens(tokens);
         }
     }
 
     impl ToTokens for TypeBareFn {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.lifetimes.to_tokens(tokens);
             self.unsafety.to_tokens(tokens);
             self.abi.to_tokens(tokens);
             self.fn_token.to_tokens(tokens);
             self.paren_token.surround(tokens, |tokens| {
                 self.inputs.to_tokens(tokens);
                 if let Some(ref variadic) = self.variadic {
                     if !self.inputs.empty_or_trailing() {
@@ -699,113 +705,113 @@ mod printing {
                     variadic.to_tokens(tokens);
                 }
             });
             self.output.to_tokens(tokens);
         }
     }
 
     impl ToTokens for TypeNever {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.bang_token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for TypeTuple {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.paren_token.surround(tokens, |tokens| {
                 self.elems.to_tokens(tokens);
             });
         }
     }
 
     impl ToTokens for TypePath {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             PathTokens(&self.qself, &self.path).to_tokens(tokens);
         }
     }
 
     impl ToTokens for TypeTraitObject {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.dyn_token.to_tokens(tokens);
             self.bounds.to_tokens(tokens);
         }
     }
 
     impl ToTokens for TypeImplTrait {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.impl_token.to_tokens(tokens);
             self.bounds.to_tokens(tokens);
         }
     }
 
     impl ToTokens for TypeGroup {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.group_token.surround(tokens, |tokens| {
                 self.elem.to_tokens(tokens);
             });
         }
     }
 
     impl ToTokens for TypeParen {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.paren_token.surround(tokens, |tokens| {
                 self.elem.to_tokens(tokens);
             });
         }
     }
 
     impl ToTokens for TypeInfer {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.underscore_token.to_tokens(tokens);
         }
     }
 
     impl ToTokens for TypeMacro {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.mac.to_tokens(tokens);
         }
     }
 
     impl ToTokens for TypeVerbatim {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.tts.to_tokens(tokens);
         }
     }
 
     impl ToTokens for ReturnType {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             match *self {
                 ReturnType::Default => {}
                 ReturnType::Type(ref arrow, ref ty) => {
                     arrow.to_tokens(tokens);
                     ty.to_tokens(tokens);
                 }
             }
         }
     }
 
     impl ToTokens for BareFnArg {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             if let Some((ref name, ref colon)) = self.name {
                 name.to_tokens(tokens);
                 colon.to_tokens(tokens);
             }
             self.ty.to_tokens(tokens);
         }
     }
 
     impl ToTokens for BareFnArgName {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             match *self {
                 BareFnArgName::Named(ref t) => t.to_tokens(tokens),
                 BareFnArgName::Wild(ref t) => t.to_tokens(tokens),
             }
         }
     }
 
     impl ToTokens for Abi {
-        fn to_tokens(&self, tokens: &mut Tokens) {
+        fn to_tokens(&self, tokens: &mut TokenStream) {
             self.extern_token.to_tokens(tokens);
             self.name.to_tokens(tokens);
         }
     }
 }