Merge tag 'upstream/v1.0.44' into jsonrc-update-to-1.0.44

This merges release 1.0.44 of upstream serde_json into serde_jsonrc.
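
Notable upstream changes picked up in this merge:

* New opt-in `unbounded_depth` feature adding `Deserializer::disable_recursion_limit`.
* ryu bumped to 1.0; floats are now written with `format_finite`.
* Truncated numbers such as "1.", "1e", and "-" now report "EOF while parsing a value" instead of "invalid number".
* `IntoDeserializer` implemented for `Value`, and `From<()>` for `Value` (producing `Value::Null`).
* Under `preserve_order`, `Map::remove` and `OccupiedEntry::remove` now use `swap_remove`.
* UI tests migrated from compiletest_rs to trybuild; `tests/deps` replaced by the `tests/crate` workspace member.

As a quick reference, the sketch below mirrors the doc example added in
src/de.rs for the new feature; the helper name is illustrative only, and it
assumes serde_stacker is available as a dependency and the crate is built
with `--features unbounded_depth`:

    use serde::Deserialize;
    use serde_jsonrc::Value;

    fn parse_deeply_nested(json: &str) -> Value {
        // With the recursion limit disabled, stack growth is delegated to
        // serde_stacker's dynamically growing stack adapter.
        let mut de = serde_jsonrc::Deserializer::from_str(json);
        de.disable_recursion_limit();
        let de = serde_stacker::Deserializer::new(&mut de);
        // Note: dropping a very deeply nested Value still recurses; see the
        // carefully_drop_nested_arrays example in the src/de.rs docs.
        Value::deserialize(de).unwrap()
    }
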
diff --git a/.travis.yml b/.travis.yml
index ef58beb..5b40c48 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -4,27 +4,32 @@
   include:
     - rust: nightly
       script:
-        - cargo build
-        - cargo build --manifest-path tests/deps/Cargo.toml
         - cargo test
         - cargo test --features preserve_order
         - cargo test --features arbitrary_precision
         - cargo test --features raw_value
+        - cargo test --features unbounded_depth
 
     - rust: 1.15.0
       script:
         # preserve_order is not supported on 1.15.0
-        - cargo build
-        - cargo build --features arbitrary_precision
+        - cargo build --manifest-path tests/crate/Cargo.toml
+        - cargo build --manifest-path tests/crate/Cargo.toml --features arbitrary_precision
+
+    - rust: 1.18.0
+      script:
+        - cargo build --manifest-path tests/crate/Cargo.toml
+        - cargo build --manifest-path tests/crate/Cargo.toml --features preserve_order
+        - cargo build --manifest-path tests/crate/Cargo.toml --features arbitrary_precision
 
     - rust: stable
     - rust: beta
-    - rust: 1.18.0
+    - rust: 1.31.0
 
     - rust: nightly
       name: Clippy
       script:
-        - rustup component add clippy-preview || travis_terminate 0
+        - rustup component add clippy || travis_terminate 0
         - cargo clippy
 
 script:
diff --git a/Cargo.toml b/Cargo.toml
index e7432a4..20a6b76 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "serde_jsonrc"
-version = "0.1.0" # remember to update html_root_url
+version = "0.1.1" # remember to update html_root_url
 authors = [
     "Michael Bolin <bolinfest@gmail.com>",
     "Erick Tryzelaar <erick.tryzelaar@gmail.com>",
@@ -17,17 +17,23 @@
 
 [dependencies]
 serde = "1.0.60"
-indexmap = { version = "1.0", optional = true }
+indexmap = { version = "1.2", optional = true }
 itoa = "0.4.3"
-ryu = "0.2"
+ryu = "1.0"
 
 [dev-dependencies]
-compiletest_rs = { version = "0.3", features = ["stable"] }
-serde_bytes = "0.10"
+automod = "0.1"
+rustversion = "1.0"
+serde_bytes = "0.11"
 serde_derive = "1.0"
+serde_stacker = "0.1"
+trybuild = "1.0"
+
+[workspace]
+members = ["tests/crate"]
 
 [package.metadata.docs.rs]
-features = ["raw_value"]
+features = ["raw_value", "unbounded_depth"]
 
 [package.metadata.playground]
 features = ["raw_value"]
@@ -50,3 +56,13 @@
 
 # Provide a RawValue type that can hold unprocessed JSON during deserialization.
 raw_value = []
+
+# Provide a method disable_recursion_limit to parse arbitrarily deep JSON
+# structures without any consideration for overflowing the stack. When using
+# this feature, you will want to provide some other way to protect against stack
+# overflows, such as by wrapping your Deserializer in the dynamically growing
+# stack adapter provided by the serde_stacker crate. Additionally you will need
+# to be careful around other recursive operations on the parsed result which may
+# overflow the stack after deserialization has completed, including, but not
+# limited to, Display and Debug and Drop impls.
+unbounded_depth = []
diff --git a/appveyor.yml b/appveyor.yml
index 648bc6f..e15b2f9 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -27,7 +27,6 @@
         - APPVEYOR_RUST_CHANNEL: nightly
     test_script:
       - cargo build
-      - cargo build --manifest-path tests/deps/Cargo.toml
       - cargo test
       - cargo test --features preserve_order
       - cargo test --features arbitrary_precision
diff --git a/src/de.rs b/src/de.rs
index 1dfee28..d1def35 100644
--- a/src/de.rs
+++ b/src/de.rs
@@ -25,6 +25,8 @@
     read: R,
     scratch: Vec<u8>,
     remaining_depth: u8,
+    #[cfg(feature = "unbounded_depth")]
+    disable_recursion_limit: bool,
 }
 
 impl<'de, R> Deserializer<R>
@@ -40,10 +42,23 @@
     ///   - Deserializer::from_bytes
     ///   - Deserializer::from_reader
     pub fn new(read: R) -> Self {
-        Deserializer {
-            read: read,
-            scratch: Vec::new(),
-            remaining_depth: 128,
+        #[cfg(not(feature = "unbounded_depth"))]
+        {
+            Deserializer {
+                read: read,
+                scratch: Vec::new(),
+                remaining_depth: 128,
+            }
+        }
+
+        #[cfg(feature = "unbounded_depth")]
+        {
+            Deserializer {
+                read: read,
+                scratch: Vec::new(),
+                remaining_depth: 128,
+                disable_recursion_limit: false,
+            }
         }
     }
 }
@@ -53,6 +68,10 @@
     R: io::Read,
 {
     /// Creates a JSON deserializer from an `io::Read`.
+    ///
+    /// Reader-based deserializers do not support deserializing borrowed types
+    /// like `&str`, since the `std::io::Read` trait has no non-copying methods
+    /// -- everything it does involves copying bytes out of the data source.
     pub fn from_reader(reader: R) -> Self {
         Deserializer::new(read::IoRead::new(reader))
     }
@@ -140,6 +159,54 @@
         }
     }
 
+    /// Parse arbitrarily deep JSON structures without any consideration for
+    /// overflowing the stack.
+    ///
+    /// You will want to provide some other way to protect against stack
+    /// overflows, such as by wrapping your Deserializer in the dynamically
+    /// growing stack adapter provided by the serde_stacker crate. Additionally
+    /// you will need to be careful around other recursive operations on the
+    /// parsed result which may overflow the stack after deserialization has
+    /// completed, including, but not limited to, Display and Debug and Drop
+    /// impls.
+    ///
+    /// *This method is only available if serde_jsonrc is built with the
+    /// `"unbounded_depth"` feature.*
+    ///
+    /// # Examples
+    ///
+    /// ```edition2018
+    /// use serde::Deserialize;
+    /// use serde_jsonrc::Value;
+    ///
+    /// fn main() {
+    ///     let mut json = String::new();
+    ///     for _ in 0..10000 {
+    ///         json = format!("[{}]", json);
+    ///     }
+    ///
+    ///     let mut deserializer = serde_jsonrc::Deserializer::from_str(&json);
+    ///     deserializer.disable_recursion_limit();
+    ///     let deserializer = serde_stacker::Deserializer::new(&mut deserializer);
+    ///     let value = Value::deserialize(deserializer).unwrap();
+    ///
+    ///     carefully_drop_nested_arrays(value);
+    /// }
+    ///
+    /// fn carefully_drop_nested_arrays(value: Value) {
+    ///     let mut stack = vec![value];
+    ///     while let Some(value) = stack.pop() {
+    ///         if let Value::Array(array) = value {
+    ///             stack.extend(array);
+    ///         }
+    ///     }
+    /// }
+    /// ```
+    #[cfg(feature = "unbounded_depth")]
+    pub fn disable_recursion_limit(&mut self) {
+        self.disable_recursion_limit = true;
+    }
+
     fn peek(&mut self) -> Result<Option<u8>> {
         self.read.peek()
     }
@@ -370,7 +437,14 @@
     }
 
     fn parse_integer(&mut self, positive: bool) -> Result<ParserNumber> {
-        match try!(self.next_char_or_null()) {
+        let next = match try!(self.next_char()) {
+            Some(b) => b,
+            None => {
+                return Err(self.error(ErrorCode::EofWhileParsingValue));
+            }
+        };
+
+        match next {
             b'0' => {
                 // There can be only one leading '0'.
                 match try!(self.peek_or_null()) {
@@ -488,7 +562,10 @@
         }
 
         if !at_least_one_digit {
-            return Err(self.peek_error(ErrorCode::InvalidNumber));
+            match try!(self.peek()) {
+                Some(_) => return Err(self.peek_error(ErrorCode::InvalidNumber)),
+                None => return Err(self.peek_error(ErrorCode::EofWhileParsingValue)),
+            }
         }
 
         match try!(self.peek_or_null()) {
@@ -517,8 +594,15 @@
             _ => true,
         };
 
+        let next = match try!(self.next_char()) {
+            Some(b) => b,
+            None => {
+                return Err(self.error(ErrorCode::EofWhileParsingValue));
+            }
+        };
+
         // Make sure a digit follows the exponent place.
-        let mut exp = match try!(self.next_char_or_null()) {
+        let mut exp = match next {
             c @ b'0'...b'9' => (c - b'0') as i32,
             _ => {
                 return Err(self.error(ErrorCode::InvalidNumber));
@@ -615,19 +699,19 @@
     }
 
     #[cfg(feature = "arbitrary_precision")]
-    fn scan_or_null(&mut self, buf: &mut String) -> Result<u8> {
+    fn scan_or_eof(&mut self, buf: &mut String) -> Result<u8> {
         match try!(self.next_char()) {
             Some(b) => {
                 buf.push(b as char);
                 Ok(b)
             }
-            None => Ok(b'\x00'),
+            None => Err(self.error(ErrorCode::EofWhileParsingValue)),
         }
     }
 
     #[cfg(feature = "arbitrary_precision")]
     fn scan_integer(&mut self, buf: &mut String) -> Result<()> {
-        match try!(self.scan_or_null(buf)) {
+        match try!(self.scan_or_eof(buf)) {
             b'0' => {
                 // There can be only one leading '0'.
                 match try!(self.peek_or_null()) {
@@ -672,7 +756,10 @@
         }
 
         if !at_least_one_digit {
-            return Err(self.peek_error(ErrorCode::InvalidNumber));
+            match try!(self.peek()) {
+                Some(_) => return Err(self.peek_error(ErrorCode::InvalidNumber)),
+                None => return Err(self.peek_error(ErrorCode::EofWhileParsingValue)),
+            }
         }
 
         match try!(self.peek_or_null()) {
@@ -698,7 +785,7 @@
         }
 
         // Make sure a digit follows the exponent place.
-        match try!(self.scan_or_null(buf)) {
+        match try!(self.scan_or_eof(buf)) {
             b'0'...b'9' => {}
             _ => {
                 return Err(self.error(ErrorCode::InvalidNumber));
@@ -1038,6 +1125,39 @@
     }
 }
 
+#[cfg(not(feature = "unbounded_depth"))]
+macro_rules! if_checking_recursion_limit {
+    ($($body:tt)*) => {
+        $($body)*
+    };
+}
+
+#[cfg(feature = "unbounded_depth")]
+macro_rules! if_checking_recursion_limit {
+    ($this:ident $($body:tt)*) => {
+        if !$this.disable_recursion_limit {
+            $this $($body)*
+        }
+    };
+}
+
+macro_rules! check_recursion {
+    ($this:ident $($body:tt)*) => {
+        if_checking_recursion_limit! {
+            $this.remaining_depth -= 1;
+            if $this.remaining_depth == 0 {
+                return Err($this.peek_error(ErrorCode::RecursionLimitExceeded));
+            }
+        }
+
+        $this $($body)*
+
+        if_checking_recursion_limit! {
+            $this.remaining_depth += 1;
+        }
+    };
+}
+
 impl<'de, 'a, R: Read<'de>> de::Deserializer<'de> for &'a mut Deserializer<R> {
     type Error = Error;
 
@@ -1083,32 +1203,22 @@
                 }
             }
             b'[' => {
-                self.remaining_depth -= 1;
-                if self.remaining_depth == 0 {
-                    return Err(self.peek_error(ErrorCode::RecursionLimitExceeded));
+                check_recursion! {
+                    self.eat_char();
+                    let ret = visitor.visit_seq(SeqAccess::new(self));
                 }
 
-                self.eat_char();
-                let ret = visitor.visit_seq(SeqAccess::new(self));
-
-                self.remaining_depth += 1;
-
                 match (ret, self.end_seq()) {
                     (Ok(ret), Ok(())) => Ok(ret),
                     (Err(err), _) | (_, Err(err)) => Err(err),
                 }
             }
             b'{' => {
-                self.remaining_depth -= 1;
-                if self.remaining_depth == 0 {
-                    return Err(self.peek_error(ErrorCode::RecursionLimitExceeded));
+                check_recursion! {
+                    self.eat_char();
+                    let ret = visitor.visit_map(MapAccess::new(self));
                 }
 
-                self.eat_char();
-                let ret = visitor.visit_map(MapAccess::new(self));
-
-                self.remaining_depth += 1;
-
                 match (ret, self.end_map()) {
                     (Ok(ret), Ok(())) => Ok(ret),
                     (Err(err), _) | (_, Err(err)) => Err(err),
@@ -1326,9 +1436,7 @@
     ///     Ok(())
     /// }
     /// #
-    /// # fn main() {
-    /// #     look_at_bytes().unwrap();
-    /// # }
+    /// # look_at_bytes().unwrap();
     /// ```
     ///
     /// Backslash escape sequences like `\n` are still interpreted and required
@@ -1348,9 +1456,7 @@
     ///     assert_eq!(expected_msg, parsed.unwrap_err().to_string());
     /// }
     /// #
-    /// # fn main() {
-    /// #     look_at_bytes();
-    /// # }
+    /// # look_at_bytes();
     /// ```
     fn deserialize_bytes<V>(self, visitor: V) -> Result<V::Value>
     where
@@ -1469,16 +1575,11 @@
 
         let value = match peek {
             b'[' => {
-                self.remaining_depth -= 1;
-                if self.remaining_depth == 0 {
-                    return Err(self.peek_error(ErrorCode::RecursionLimitExceeded));
+                check_recursion! {
+                    self.eat_char();
+                    let ret = visitor.visit_seq(SeqAccess::new(self));
                 }
 
-                self.eat_char();
-                let ret = visitor.visit_seq(SeqAccess::new(self));
-
-                self.remaining_depth += 1;
-
                 match (ret, self.end_seq()) {
                     (Ok(ret), Ok(())) => Ok(ret),
                     (Err(err), _) | (_, Err(err)) => Err(err),
@@ -1525,16 +1626,11 @@
 
         let value = match peek {
             b'{' => {
-                self.remaining_depth -= 1;
-                if self.remaining_depth == 0 {
-                    return Err(self.peek_error(ErrorCode::RecursionLimitExceeded));
+                check_recursion! {
+                    self.eat_char();
+                    let ret = visitor.visit_map(MapAccess::new(self));
                 }
 
-                self.eat_char();
-                let ret = visitor.visit_map(MapAccess::new(self));
-
-                self.remaining_depth += 1;
-
                 match (ret, self.end_map()) {
                     (Ok(ret), Ok(())) => Ok(ret),
                     (Err(err), _) | (_, Err(err)) => Err(err),
@@ -1567,32 +1663,22 @@
 
         let value = match peek {
             b'[' => {
-                self.remaining_depth -= 1;
-                if self.remaining_depth == 0 {
-                    return Err(self.peek_error(ErrorCode::RecursionLimitExceeded));
+                check_recursion! {
+                    self.eat_char();
+                    let ret = visitor.visit_seq(SeqAccess::new(self));
                 }
 
-                self.eat_char();
-                let ret = visitor.visit_seq(SeqAccess::new(self));
-
-                self.remaining_depth += 1;
-
                 match (ret, self.end_seq()) {
                     (Ok(ret), Ok(())) => Ok(ret),
                     (Err(err), _) | (_, Err(err)) => Err(err),
                 }
             }
             b'{' => {
-                self.remaining_depth -= 1;
-                if self.remaining_depth == 0 {
-                    return Err(self.peek_error(ErrorCode::RecursionLimitExceeded));
+                check_recursion! {
+                    self.eat_char();
+                    let ret = visitor.visit_map(MapAccess::new(self));
                 }
 
-                self.eat_char();
-                let ret = visitor.visit_map(MapAccess::new(self));
-
-                self.remaining_depth += 1;
-
                 match (ret, self.end_map()) {
                     (Ok(ret), Ok(())) => Ok(ret),
                     (Err(err), _) | (_, Err(err)) => Err(err),
@@ -1621,16 +1707,11 @@
     {
         match try!(self.parse_whitespace()) {
             Some(b'{') => {
-                self.remaining_depth -= 1;
-                if self.remaining_depth == 0 {
-                    return Err(self.peek_error(ErrorCode::RecursionLimitExceeded));
+                check_recursion! {
+                    self.eat_char();
+                    let value = try!(visitor.visit_enum(VariantAccess::new(self)));
                 }
 
-                self.eat_char();
-                let value = try!(visitor.visit_enum(VariantAccess::new(self)));
-
-                self.remaining_depth += 1;
-
                 match try!(self.parse_whitespace()) {
                     Some(b'}') => {
                         self.eat_char();
@@ -2156,13 +2237,26 @@
 /// as a [`File`], you will want to apply your own buffering because serde_jsonrc
 /// will not buffer the input. See [`std::io::BufReader`].
 ///
+/// It is expected that the input stream ends after the deserialized object.
+/// If the stream does not end, such as in the case of a persistent socket connection,
+/// this function will not return. It is possible instead to deserialize from a prefix of an input
+/// stream without looking for EOF by managing your own [`Deserializer`].
+///
+/// Note that counter to intuition, this function is usually slower than
+/// reading a file completely into memory and then applying [`from_str`]
+/// or [`from_slice`] on it. See [issue #160].
+///
 /// [`File`]: https://doc.rust-lang.org/std/fs/struct.File.html
-/// [`BufReader`]: https://doc.rust-lang.org/std/io/struct.BufReader.html
+/// [`std::io::BufReader`]: https://doc.rust-lang.org/std/io/struct.BufReader.html
+/// [`from_str`]: ./fn.from_str.html
+/// [`from_slice`]: ./fn.from_slice.html
+/// [issue #160]: https://github.com/serde-rs/json/issues/160
 ///
 /// # Example
 ///
+/// Reading the contents of a file.
+///
 /// ```edition2018
-/// # use serde_derive::Deserialize;
 /// use serde::Deserialize;
 ///
 /// use std::error::Error;
@@ -2196,6 +2290,38 @@
 /// }
 /// ```
 ///
+/// Reading from a persistent socket connection.
+///
+/// ```edition2018
+/// use serde::Deserialize;
+///
+/// use std::error::Error;
+/// use std::net::{TcpListener, TcpStream};
+///
+/// #[derive(Deserialize, Debug)]
+/// struct User {
+///     fingerprint: String,
+///     location: String,
+/// }
+///
+/// fn read_user_from_stream(tcp_stream: TcpStream) -> Result<User, Box<dyn Error>> {
+///     let mut de = serde_jsonrc::Deserializer::from_reader(tcp_stream);
+///     let u = User::deserialize(&mut de)?;
+///
+///     Ok(u)
+/// }
+///
+/// fn main() {
+/// # }
+/// # fn fake_main() {
+///     let listener = TcpListener::bind("127.0.0.1:4000").unwrap();
+///
+///     for stream in listener.incoming() {
+///         println!("{:#?}", read_user_from_stream(stream.unwrap()));
+///     }
+/// }
+/// ```
+///
 /// # Errors
 ///
 /// This conversion can fail if the structure of the input does not match the
@@ -2218,7 +2344,6 @@
 /// # Example
 ///
 /// ```edition2018
-/// # use serde_derive::Deserialize;
 /// use serde::Deserialize;
 ///
 /// #[derive(Deserialize, Debug)]
@@ -2261,7 +2386,6 @@
 /// # Example
 ///
 /// ```edition2018
-/// # use serde_derive::Deserialize;
 /// use serde::Deserialize;
 ///
 /// #[derive(Deserialize, Debug)]
diff --git a/src/lib.rs b/src/lib.rs
index f51482c..069441a 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -128,7 +128,6 @@
 //! largely automatically.
 //!
 //! ```edition2018
-//! # use serde_derive::{Deserialize, Serialize};
 //! use serde::{Deserialize, Serialize};
 //! use serde_jsonrc::Result;
 //!
@@ -188,8 +187,7 @@
 //! # Constructing JSON values
 //!
 //! Serde jsonrc provides a [`json!` macro][macro] to build `serde_jsonrc::Value`
-//! objects with very natural JSON syntax. In order to use this macro,
-//! `serde_jsonrc` needs to be imported with the `#[macro_use]` attribute.
+//! objects with very natural JSON syntax.
 //!
 //! ```edition2018
 //! use serde_jsonrc::json;
@@ -252,7 +250,6 @@
 //! such as a File or a TCP stream.
 //!
 //! ```edition2018
-//! # use serde_derive::{Deserialize, Serialize};
 //! use serde::{Deserialize, Serialize};
 //! use serde_jsonrc::Result;
 //!
@@ -303,13 +300,14 @@
 //! [macro]: https://docs.serde.rs/serde_jsonrc/macro.json.html
 //! [`serde-json-core`]: https://japaric.github.io/serde-json-core/serde_jsonrc_core/
 
-#![doc(html_root_url = "https://docs.rs/serde_jsonrc/0.1.0")]
+#![doc(html_root_url = "https://docs.rs/serde_jsonrc/0.1.1")]
+#![allow(unknown_lints, bare_trait_objects, ellipsis_inclusive_range_patterns)]
 #![cfg_attr(feature = "cargo-clippy", allow(renamed_and_removed_lints))]
 #![cfg_attr(feature = "cargo-clippy", deny(clippy, clippy_pedantic))]
 // Ignored clippy lints
 #![cfg_attr(
     feature = "cargo-clippy",
-    allow(deprecated_cfg_attr, doc_markdown)
+    allow(deprecated_cfg_attr, doc_markdown, needless_doctest_main)
 )]
 // Ignored clippy_pedantic lints
 #![cfg_attr(feature = "cargo-clippy", allow(
@@ -319,15 +317,21 @@
     cast_possible_wrap,
     cast_precision_loss,
     cast_sign_loss,
+    // correctly used
+    integer_division,
     // things are often more readable this way
     cast_lossless,
     module_name_repetitions,
     shadow_unrelated,
     single_match_else,
+    too_many_lines,
     use_self,
     zero_prefixed_literal,
     // we support older compilers
+    checked_conversions,
     redundant_field_names,
+    // noisy
+    must_use_candidate,
 ))]
 #![deny(missing_docs)]
 
diff --git a/src/map.rs b/src/map.rs
index 05efc51..5e13df9 100644
--- a/src/map.rs
+++ b/src/map.rs
@@ -126,7 +126,10 @@
         String: Borrow<Q>,
         Q: Ord + Eq + Hash,
     {
-        self.map.remove(key)
+        #[cfg(feature = "preserve_order")]
+        return self.map.swap_remove(key);
+        #[cfg(not(feature = "preserve_order"))]
+        return self.map.remove(key);
     }
 
     /// Gets the given key's corresponding entry in the map for in-place
@@ -692,7 +695,10 @@
     /// ```
     #[inline]
     pub fn remove(self) -> Value {
-        self.occupied.remove()
+        #[cfg(feature = "preserve_order")]
+        return self.occupied.swap_remove();
+        #[cfg(not(feature = "preserve_order"))]
+        return self.occupied.remove();
     }
 }
 
diff --git a/src/number.rs b/src/number.rs
index 577b9b2..7ed3066 100644
--- a/src/number.rs
+++ b/src/number.rs
@@ -237,7 +237,7 @@
                 }
                 #[cfg(feature = "arbitrary_precision")]
                 {
-                    ryu::Buffer::new().format(f).to_owned()
+                    ryu::Buffer::new().format_finite(f).to_owned()
                 }
             };
             Some(Number { n: n })
@@ -473,7 +473,7 @@
             } else if let Some(i) = self.as_i64() {
                 return visitor.visit_i64(i);
             } else if let Some(f) = self.as_f64() {
-                if f.to_string() == self.n {
+                if ryu::Buffer::new().format_finite(f) == self.n || f.to_string() == self.n {
                     return visitor.visit_f64(f);
                 }
             }
diff --git a/src/raw.rs b/src/raw.rs
index ca7c134..0aea3f6 100644
--- a/src/raw.rs
+++ b/src/raw.rs
@@ -20,10 +20,19 @@
 /// When serializing, a value of this type will retain its original formatting
 /// and will not be minified or pretty-printed.
 ///
+/// # Note
+///
+/// `RawValue` is only available if serde\_jsonrc is built with the `"raw_value"`
+/// feature.
+///
+/// ```toml
+/// [dependencies]
+/// serde_jsonrc = { version = "0.1", features = ["raw_value"] }
+/// ```
+///
 /// # Example
 ///
 /// ```edition2018
-/// # use serde_derive::{Deserialize, Serialize};
 /// use serde::{Deserialize, Serialize};
 /// use serde_jsonrc::{Result, value::RawValue};
 ///
@@ -43,7 +52,7 @@
 /// // keys into a single "info" key holding an array of code and payload.
 /// //
 /// // This could be done equivalently using serde_jsonrc::Value as the type for
-/// // payload, but &RawValue will perform netter because it does not require
+/// // payload, but &RawValue will perform better because it does not require
 /// // memory allocation. The correct range of bytes is borrowed from the input
 /// // data and pasted verbatim into the output.
 /// fn rearrange(input: &str) -> Result<String> {
@@ -70,7 +79,7 @@
 /// The typical usage of `RawValue` will be in the borrowed form:
 ///
 /// ```edition2018
-/// # use serde_derive::Deserialize;
+/// # use serde::Deserialize;
 /// # use serde_jsonrc::value::RawValue;
 /// #
 /// #[derive(Deserialize)]
@@ -81,7 +90,7 @@
 /// ```
 ///
 /// The borrowed form is suitable when deserializing through
-/// [`serde_jsonrc::from_str`] and [`serde_json::from_slice`] which support
+/// [`serde_jsonrc::from_str`] and [`serde_jsonrc::from_slice`] which support
 /// borrowing from the input data without memory allocation.
 ///
 /// When deserializing through [`serde_jsonrc::from_reader`] you will need to use
@@ -93,7 +102,7 @@
 /// [`serde_jsonrc::from_reader`]: ../fn.from_reader.html
 ///
 /// ```edition2018
-/// # use serde_derive::Deserialize;
+/// # use serde::Deserialize;
 /// # use serde_jsonrc::value::RawValue;
 /// #
 /// #[derive(Deserialize)]
@@ -101,16 +110,6 @@
 ///     raw_value: Box<RawValue>,
 /// }
 /// ```
-///
-/// # Note
-///
-/// `RawValue` is only available if serde\_json is built with the `"raw_value"`
-/// feature.
-///
-/// ```toml
-/// [dependencies]
-/// serde_jsonrc = { version = "1.0", features = ["raw_value"] }
-/// ```
 #[repr(C)]
 pub struct RawValue {
     json: str,
@@ -185,7 +184,6 @@
     /// # Example
     ///
     /// ```edition2018
-    /// # use serde_derive::Deserialize;
     /// use serde::Deserialize;
     /// use serde_jsonrc::{Result, value::RawValue};
     ///
diff --git a/src/ser.rs b/src/ser.rs
index 463c15a..cfae381 100644
--- a/src/ser.rs
+++ b/src/ser.rs
@@ -1711,7 +1711,7 @@
         W: io::Write,
     {
         let mut buffer = ryu::Buffer::new();
-        let s = buffer.format(value);
+        let s = buffer.format_finite(value);
         writer.write_all(s.as_bytes())
     }
 
@@ -1722,7 +1722,7 @@
         W: io::Write,
     {
         let mut buffer = ryu::Buffer::new();
-        let s = buffer.format(value);
+        let s = buffer.format_finite(value);
         writer.write_all(s.as_bytes())
     }
 
diff --git a/src/value/de.rs b/src/value/de.rs
index a1f40cf..c1f0810 100644
--- a/src/value/de.rs
+++ b/src/value/de.rs
@@ -479,6 +479,14 @@
     }
 }
 
+impl<'de> IntoDeserializer<'de, Error> for Value {
+    type Deserializer = Self;
+
+    fn into_deserializer(self) -> Self::Deserializer {
+        self
+    }
+}
+
 struct VariantDeserializer {
     value: Option<Value>,
 }
diff --git a/src/value/from.rs b/src/value/from.rs
index a322bb7..4b93f09 100644
--- a/src/value/from.rs
+++ b/src/value/from.rs
@@ -211,3 +211,19 @@
         Value::Array(iter.into_iter().map(Into::into).collect())
     }
 }
+
+impl From<()> for Value {
+    /// Convert `()` to `Value`
+    ///
+    /// # Examples
+    ///
+    /// ```edition2018
+    /// use serde_jsonrc::Value;
+    ///
+    /// let u = ();
+    /// let x: Value = u.into();
+    /// ```
+    fn from((): ()) -> Self {
+        Value::Null
+    }
+}
diff --git a/src/value/mod.rs b/src/value/mod.rs
index 9564c61..b61d358 100644
--- a/src/value/mod.rs
+++ b/src/value/mod.rs
@@ -3,8 +3,7 @@
 //! # Constructing JSON
 //!
 //! Serde jsonrc provides a [`json!` macro][macro] to build `serde_jsonrc::Value`
-//! objects with very natural JSON syntax. In order to use this macro,
-//! `serde_jsonrc` needs to be imported with the `#[macro_use]` attribute.
+//! objects with very natural JSON syntax.
 //!
 //! ```edition2018
 //! use serde_jsonrc::json;
@@ -83,9 +82,7 @@
 //!     Ok(())
 //! }
 //! #
-//! # fn main() {
-//! #     untyped_example().unwrap();
-//! # }
+//! # untyped_example().unwrap();
 //! ```
 //!
 //! [macro]: https://docs.serde.rs/serde_jsonrc/macro.json.html
@@ -865,7 +862,7 @@
 /// # Examples
 ///
 /// ```edition2018
-/// # use serde_derive::Deserialize;
+/// # use serde::Deserialize;
 /// use serde_jsonrc::Value;
 ///
 /// #[derive(Deserialize)]
@@ -885,9 +882,7 @@
 /// #     Ok(())
 /// # }
 /// #
-/// # fn main() {
-/// #     try_main().unwrap()
-/// # }
+/// # try_main().unwrap()
 /// ```
 impl Default for Value {
     fn default() -> Value {
@@ -907,7 +902,6 @@
 /// # Example
 ///
 /// ```edition2018
-/// # use serde_derive::Serialize;
 /// use serde::Serialize;
 /// use serde_jsonrc::json;
 ///
@@ -937,9 +931,7 @@
 ///     Ok(())
 /// }
 /// #
-/// # fn main() {
-/// #     compare_json_values().unwrap();
-/// # }
+/// # compare_json_values().unwrap();
 /// ```
 ///
 /// # Errors
@@ -972,7 +964,6 @@
 /// # Example
 ///
 /// ```edition2018
-/// # use serde_derive::Deserialize;
 /// use serde::Deserialize;
 /// use serde_jsonrc::json;
 ///
diff --git a/tests/compiletest.rs b/tests/compiletest.rs
index d6f4866..f9aea23 100644
--- a/tests/compiletest.rs
+++ b/tests/compiletest.rs
@@ -1,18 +1,6 @@
-extern crate compiletest_rs as compiletest;
-
+#[rustversion::attr(not(nightly), ignore)]
 #[test]
 fn ui() {
-    compiletest::run_tests(&compiletest::Config {
-        mode: compiletest::common::Mode::Ui,
-        src_base: std::path::PathBuf::from("tests/ui"),
-        target_rustcflags: Some(String::from(
-            "\
-             --edition=2018 \
-             -L tests/deps/target/debug/deps \
-             -Z unstable-options \
-             --extern serde_jsonrc \
-             ",
-        )),
-        ..Default::default()
-    });
+    let t = trybuild::TestCases::new();
+    t.compile_fail("tests/ui/*.rs");
 }
diff --git a/tests/crate/Cargo.toml b/tests/crate/Cargo.toml
new file mode 100644
index 0000000..d09de09
--- /dev/null
+++ b/tests/crate/Cargo.toml
@@ -0,0 +1,16 @@
+[package]
+name = "serde_json_test"
+version = "0.0.0"
+publish = false
+
+[lib]
+path = "test.rs"
+
+[dependencies]
+serde_jsonrc = { path = "../.." }
+
+[features]
+arbitrary_precision = ["serde_jsonrc/arbitrary_precision"]
+preserve_order = ["serde_jsonrc/preserve_order"]
+raw_value = ["serde_jsonrc/raw_value"]
+unbounded_depth = ["serde_jsonrc/unbounded_depth"]
diff --git a/tests/crate/test.rs b/tests/crate/test.rs
new file mode 100644
index 0000000..d1ee3b8
--- /dev/null
+++ b/tests/crate/test.rs
@@ -0,0 +1,2 @@
+extern crate serde_jsonrc;
+pub use serde_jsonrc::*;
diff --git a/tests/deps/Cargo.toml b/tests/deps/Cargo.toml
deleted file mode 100644
index 213850c..0000000
--- a/tests/deps/Cargo.toml
+++ /dev/null
@@ -1,13 +0,0 @@
-[package]
-name = "serde_test_suite_deps"
-version = "0.0.0"
-authors = ["David Tolnay <dtolnay@gmail.com>"]
-publish = false
-
-[workspace]
-
-[features]
-arbitrary_precision = ["serde_jsonrc/arbitrary_precision"]
-
-[dependencies]
-serde_jsonrc = { path = "../.." }
diff --git a/tests/deps/src/lib.rs b/tests/deps/src/lib.rs
deleted file mode 100644
index e63ee9d..0000000
--- a/tests/deps/src/lib.rs
+++ /dev/null
@@ -1,3 +0,0 @@
-#![feature(/*=============================================]
-#![===  Serde test suite requires a nightly compiler.  ===]
-#![====================================================*/)]
diff --git a/tests/regression.rs b/tests/regression.rs
new file mode 100644
index 0000000..eff29ff
--- /dev/null
+++ b/tests/regression.rs
@@ -0,0 +1,6 @@
+extern crate automod;
+extern crate serde;
+extern crate serde_derive;
+
+#[path = "regression/mod.rs"]
+mod regression;
diff --git a/tests/regression/issue520.rs b/tests/regression/issue520.rs
new file mode 100644
index 0000000..c2067b8
--- /dev/null
+++ b/tests/regression/issue520.rs
@@ -0,0 +1,18 @@
+use serde_derive::{Serialize, Deserialize};
+
+#[derive(Serialize, Deserialize, Debug)]
+#[serde(tag = "type", content = "data")]
+enum E {
+    Float(f32),
+}
+
+#[test]
+fn test() {
+    let e = E::Float(159.1);
+    let v = serde_jsonrc::to_value(e).unwrap();
+    let e = serde_jsonrc::from_value::<E>(v).unwrap();
+
+    match e {
+        E::Float(f) => assert_eq!(f, 159.1),
+    }
+}
diff --git a/tests/regression/mod.rs b/tests/regression/mod.rs
new file mode 100644
index 0000000..830175f
--- /dev/null
+++ b/tests/regression/mod.rs
@@ -0,0 +1 @@
+automod::dir!("tests/regression");
diff --git a/tests/stream.rs b/tests/stream.rs
index fe0aa73..9638e48 100644
--- a/tests/stream.rs
+++ b/tests/stream.rs
@@ -82,6 +82,36 @@
 }
 
 #[test]
+fn test_json_stream_truncated_decimal() {
+    let data = "{\"x\":4.";
+
+    test_stream!(data, Value, |stream| {
+        assert!(stream.next().unwrap().unwrap_err().is_eof());
+        assert_eq!(stream.byte_offset(), 0);
+    });
+}
+
+#[test]
+fn test_json_stream_truncated_negative() {
+    let data = "{\"x\":-";
+
+    test_stream!(data, Value, |stream| {
+        assert!(stream.next().unwrap().unwrap_err().is_eof());
+        assert_eq!(stream.byte_offset(), 0);
+    });
+}
+
+#[test]
+fn test_json_stream_truncated_exponent() {
+    let data = "{\"x\":4e";
+
+    test_stream!(data, Value, |stream| {
+        assert!(stream.next().unwrap().unwrap_err().is_eof());
+        assert_eq!(stream.byte_offset(), 0);
+    });
+}
+
+#[test]
 fn test_json_stream_empty() {
     let data = "";
 
diff --git a/tests/test.rs b/tests/test.rs
index 83d0f8b..c9f9070 100644
--- a/tests/test.rs
+++ b/tests/test.rs
@@ -26,7 +26,7 @@
 use std::{i16, i32, i64, i8};
 use std::{u16, u32, u64, u8};
 
-use serde::de::{self, Deserialize, IgnoredAny};
+use serde::de::{self, Deserialize, IgnoredAny, IntoDeserializer};
 use serde::ser::{self, Serialize, Serializer};
 
 use serde_bytes::{ByteBuf, Bytes};
@@ -737,15 +737,15 @@
     test_parse_err::<f64>(&[
         ("+", "expected value at line 1 column 1"),
         (".", "expected value at line 1 column 1"),
-        ("-", "invalid number at line 1 column 1"),
+        ("-", "EOF while parsing a value at line 1 column 1"),
         ("00", "invalid number at line 1 column 2"),
         ("0x80", "trailing characters at line 1 column 2"),
         ("\\0", "expected value at line 1 column 1"),
-        ("1.", "invalid number at line 1 column 2"),
+        ("1.", "EOF while parsing a value at line 1 column 2"),
         ("1.a", "invalid number at line 1 column 3"),
         ("1.e1", "invalid number at line 1 column 3"),
-        ("1e", "invalid number at line 1 column 2"),
-        ("1e+", "invalid number at line 1 column 3"),
+        ("1e", "EOF while parsing a value at line 1 column 2"),
+        ("1e+", "EOF while parsing a value at line 1 column 3"),
         ("1a", "trailing characters at line 1 column 2"),
         (
             "100e777777777777777777777777777",
@@ -1388,9 +1388,9 @@
             S: ser::Serializer,
         {
             use serde::ser::SerializeSeq;
-            let mut seq = try!(serializer.serialize_seq(None));
+            let mut seq = serializer.serialize_seq(None)?;
             for elem in &self.0 {
-                try!(seq.serialize_element(elem));
+                seq.serialize_element(elem)?;
             }
             seq.end()
         }
@@ -1425,7 +1425,7 @@
         {
             let mut values = Vec::new();
 
-            while let Some(value) = try!(visitor.next_element()) {
+            while let Some(value) = visitor.next_element()? {
                 values.push(value);
             }
 
@@ -1475,10 +1475,10 @@
             S: ser::Serializer,
         {
             use serde::ser::SerializeMap;
-            let mut map = try!(serializer.serialize_map(None));
+            let mut map = serializer.serialize_map(None)?;
             for (k, v) in &self.0 {
-                try!(map.serialize_key(k));
-                try!(map.serialize_value(v));
+                map.serialize_key(k)?;
+                map.serialize_value(v)?;
             }
             map.end()
         }
@@ -1514,7 +1514,7 @@
         {
             let mut values = BTreeMap::new();
 
-            while let Some((key, value)) = try!(visitor.next_entry()) {
+            while let Some((key, value)) = visitor.next_entry()? {
                 values.insert(key, value);
             }
 
@@ -1760,11 +1760,24 @@
         .collect();
     let _: Value = from_str(&brackets).unwrap();
 
-    let brackets: String = iter::repeat('[').take(128).collect();
+    let brackets: String = iter::repeat('[').take(129).collect();
     test_parse_err::<Value>(&[(&brackets, "recursion limit exceeded at line 1 column 128")]);
 }
 
 #[test]
+#[cfg(feature = "unbounded_depth")]
+fn test_disable_recursion_limit() {
+    let brackets: String = iter::repeat('[')
+        .take(140)
+        .chain(iter::repeat(']').take(140))
+        .collect();
+
+    let mut deserializer = Deserializer::from_str(&brackets);
+    deserializer.disable_recursion_limit();
+    Value::deserialize(&mut deserializer).unwrap();
+}
+
+#[test]
 fn test_integer_key() {
     // map with integer keys
     let map = treemap!(
@@ -2166,3 +2179,22 @@
     let value = json!({ "map": { "1": null } });
     Outer::deserialize(&value).unwrap();
 }
+
+#[test]
+fn test_value_into_deserializer() {
+    #[derive(Deserialize)]
+    struct Outer {
+        inner: Inner,
+    }
+
+    #[derive(Deserialize)]
+    struct Inner {
+        string: String,
+    }
+
+    let mut map = BTreeMap::new();
+    map.insert("inner", json!({ "string": "Hello World" }));
+
+    let outer = Outer::deserialize(map.into_deserializer()).unwrap();
+    assert_eq!(outer.inner.string, "Hello World");
+}
diff --git a/tests/ui/missing_colon.stderr b/tests/ui/missing_colon.stderr
index eec0ebd..8b8b5b5 100644
--- a/tests/ui/missing_colon.stderr
+++ b/tests/ui/missing_colon.stderr
@@ -5,6 +5,3 @@
   |     ^^^^^^^^^^^^^^^ missing tokens in macro arguments
   |
   = note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)
-
-error: aborting due to previous error
-
diff --git a/tests/ui/missing_value.stderr b/tests/ui/missing_value.stderr
index fc5a46d..7e223a1 100644
--- a/tests/ui/missing_value.stderr
+++ b/tests/ui/missing_value.stderr
@@ -5,6 +5,3 @@
   |     ^^^^^^^^^^^^^^^^^ missing tokens in macro arguments
   |
   = note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)
-
-error: aborting due to previous error
-
diff --git a/tests/ui/not_found.stderr b/tests/ui/not_found.stderr
index 6b39db1..c62bbe7 100644
--- a/tests/ui/not_found.stderr
+++ b/tests/ui/not_found.stderr
@@ -4,6 +4,4 @@
 4 |     json!({ "a" : x });
   |                   ^ not found in this scope
 
-error: aborting due to previous error
-
 For more information about this error, try `rustc --explain E0425`.
diff --git a/tests/ui/parse_expr.stderr b/tests/ui/parse_expr.stderr
index c182361..2c66d2f 100644
--- a/tests/ui/parse_expr.stderr
+++ b/tests/ui/parse_expr.stderr
@@ -5,6 +5,3 @@
   |     ^^^^^^^^^^^^^^^^^^^ missing tokens in macro arguments
   |
   = note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)
-
-error: aborting due to previous error
-
diff --git a/tests/ui/parse_key.stderr b/tests/ui/parse_key.stderr
index c91afcd..7e70b11 100644
--- a/tests/ui/parse_key.stderr
+++ b/tests/ui/parse_key.stderr
@@ -4,6 +4,4 @@
 4 |     json!({ "".s : true });
   |                ^
 
-error: aborting due to previous error
-
 For more information about this error, try `rustc --explain E0609`.
diff --git a/tests/ui/unexpected_after_array_element.stderr b/tests/ui/unexpected_after_array_element.stderr
index 835229b..3708992 100644
--- a/tests/ui/unexpected_after_array_element.stderr
+++ b/tests/ui/unexpected_after_array_element.stderr
@@ -3,6 +3,3 @@
   |
 4 |     json!([ true => ]);
   |                  ^^ no rules expected this token in macro call
-
-error: aborting due to previous error
-
diff --git a/tests/ui/unexpected_after_map_entry.stderr b/tests/ui/unexpected_after_map_entry.stderr
index 02484c8..60f9815 100644
--- a/tests/ui/unexpected_after_map_entry.stderr
+++ b/tests/ui/unexpected_after_map_entry.stderr
@@ -3,6 +3,3 @@
   |
 4 |     json!({ "k": true => });
   |                       ^^ no rules expected this token in macro call
-
-error: aborting due to previous error
-
diff --git a/tests/ui/unexpected_colon.stderr b/tests/ui/unexpected_colon.stderr
index 495bbd9..2708b08 100644
--- a/tests/ui/unexpected_colon.stderr
+++ b/tests/ui/unexpected_colon.stderr
@@ -3,6 +3,3 @@
   |
 4 |     json!({ : true });
   |             ^ no rules expected this token in macro call
-
-error: aborting due to previous error
-
diff --git a/tests/ui/unexpected_comma.stderr b/tests/ui/unexpected_comma.stderr
index c6d2b66..65e0453 100644
--- a/tests/ui/unexpected_comma.stderr
+++ b/tests/ui/unexpected_comma.stderr
@@ -3,6 +3,3 @@
   |
 4 |     json!({ "a" , "b": true });
   |                 ^ no rules expected this token in macro call
-
-error: aborting due to previous error
-
diff --git a/tests/ui/update-references.sh b/tests/ui/update-references.sh
deleted file mode 100755
index aa99d35..0000000
--- a/tests/ui/update-references.sh
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/bin/bash
-#
-# Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-# file at the top-level directory of this distribution and at
-# http://rust-lang.org/COPYRIGHT.
-#
-# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-# option. This file may not be copied, modified, or distributed
-# except according to those terms.
-
-# A script to update the references for particular tests. The idea is
-# that you do a run, which will generate files in the build directory
-# containing the (normalized) actual output of the compiler. This
-# script will then copy that output and replace the "expected output"
-# files. You can then commit the changes.
-#
-# If you find yourself manually editing a foo.stderr file, you're
-# doing it wrong.
-
-if [[ "$1" == "--help" || "$1" == "-h" || "$1" == "" || "$2" == "" ]]; then
-    echo "usage: $0 <build-directory> <relative-path-to-rs-files>"
-    echo ""
-    echo "For example:"
-    echo "   $0 ../../../build/x86_64-apple-darwin/test/ui *.rs */*.rs"
-fi
-
-MYDIR=$(dirname $0)
-
-BUILD_DIR="$1"
-shift
-
-while [[ "$1" != "" ]]; do
-    STDERR_NAME="${1/%.rs/.stderr}"
-    STDOUT_NAME="${1/%.rs/.stdout}"
-    shift
-    if [ -f $BUILD_DIR/$STDOUT_NAME ] && \
-           ! (diff $BUILD_DIR/$STDOUT_NAME $MYDIR/$STDOUT_NAME >& /dev/null); then
-        echo updating $MYDIR/$STDOUT_NAME
-        cp $BUILD_DIR/$STDOUT_NAME $MYDIR/$STDOUT_NAME
-    fi
-    if [ -f $BUILD_DIR/$STDERR_NAME ] && \
-           ! (diff $BUILD_DIR/$STDERR_NAME $MYDIR/$STDERR_NAME >& /dev/null); then
-        echo updating $MYDIR/$STDERR_NAME
-        cp $BUILD_DIR/$STDERR_NAME $MYDIR/$STDERR_NAME
-    fi
-done
-
-