Auto merge of #85993 - bjorn3:serde_json, r=wesleywiser

Remove all json handling from rustc_serialize

Json is now handled using serde_json. Where appropriate I have replaced json usage with binary serialization (rmeta files) or manual string formatting (emcc linker arg generation).

This allowed for removing and simplifying a lot of code, which hopefully results in faster serialization/deserialization and faster compiles of rustc itself.

Where sensible we now use serde. Metadata and incr cache serialization keeps using a heavily modified (compared to crates.io) rustc-serialize version that in the future could probably be extended with zero-copy deserialization or other perf tricks that serde can't support due to supporting more than one serialization format.

Note that I had to remove `-Zast-json` and `-Zast-json-noexpand` as the relevant AST types don't implement `serde::Serialize`.

Fixes #40177

See also https://github.com/rust-lang/compiler-team/issues/418
This commit is contained in:
bors
2022-06-03 17:55:02 +00:00
48 changed files with 598 additions and 4230 deletions
+6
View File
@@ -3686,6 +3686,7 @@ dependencies = [
"rustc_span",
"rustc_symbol_mangling",
"rustc_target",
"serde_json",
"smallvec",
"snap",
"tempfile",
@@ -3774,6 +3775,7 @@ dependencies = [
"rustc_span",
"rustc_target",
"rustc_typeck",
"serde_json",
"tracing",
"winapi",
]
@@ -3809,6 +3811,8 @@ dependencies = [
"rustc_macros",
"rustc_serialize",
"rustc_span",
"serde",
"serde_json",
"termcolor",
"termize",
"tracing",
@@ -4024,6 +4028,7 @@ dependencies = [
"rustc_serialize",
"rustc_span",
"rustc_target",
"serde",
]
[[package]]
@@ -4445,6 +4450,7 @@ dependencies = [
"rustc_macros",
"rustc_serialize",
"rustc_span",
"serde_json",
"tracing",
]
+2 -5
View File
@@ -41,9 +41,6 @@
use std::fmt;
use std::mem;
#[cfg(test)]
mod tests;
/// A "Label" is an identifier of some point in sources,
/// e.g. in the following code:
///
@@ -2476,8 +2473,8 @@ pub struct AttrId {
}
impl<S: Encoder> rustc_serialize::Encodable<S> for AttrId {
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_unit()
fn encode(&self, _s: &mut S) -> Result<(), S::Error> {
Ok(())
}
}
-11
View File
@@ -1,11 +0,0 @@
use super::*;
// Are ASTs encodable?
#[test]
fn check_asts_encodable() {
fn assert_encodable<
T: for<'a> rustc_serialize::Encodable<rustc_serialize::json::Encoder<'a>>,
>() {
}
assert_encodable::<Crate>();
}
+1
View File
@@ -16,6 +16,7 @@ jobserver = "0.1.22"
tempfile = "3.2"
thorin-dwp = "0.2"
pathdiff = "0.2.0"
serde_json = "1.0.59"
snap = "1"
smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }
regex = "1.4"
+5 -15
View File
@@ -14,7 +14,6 @@
use rustc_middle::middle::dependency_format::Linkage;
use rustc_middle::middle::exported_symbols::{ExportedSymbol, SymbolExportInfo, SymbolExportKind};
use rustc_middle::ty::TyCtxt;
use rustc_serialize::{json, Encoder};
use rustc_session::config::{self, CrateType, DebugInfo, LinkerPluginLto, Lto, OptLevel, Strip};
use rustc_session::Session;
use rustc_span::symbol::Symbol;
@@ -1152,21 +1151,12 @@ fn export_symbols(&mut self, _tmpdir: &Path, _crate_type: CrateType, symbols: &[
self.cmd.arg("-s");
let mut arg = OsString::from("EXPORTED_FUNCTIONS=");
let mut encoded = String::new();
{
let mut encoder = json::Encoder::new(&mut encoded);
let res = encoder.emit_seq(symbols.len(), |encoder| {
for (i, sym) in symbols.iter().enumerate() {
encoder.emit_seq_elt(i, |encoder| encoder.emit_str(&("_".to_owned() + sym)))?;
}
Ok(())
});
if let Err(e) = res {
self.sess.fatal(&format!("failed to encode exported symbols: {}", e));
}
}
let encoded = serde_json::to_string(
&symbols.iter().map(|sym| "_".to_owned() + sym).collect::<Vec<_>>(),
)
.unwrap();
debug!("{}", encoded);
arg.push(encoded);
self.cmd.arg(arg);
+1
View File
@@ -9,6 +9,7 @@ crate-type = ["dylib"]
[dependencies]
libc = "0.2"
tracing = { version = "0.1.28" }
serde_json = "1.0.59"
rustc_log = { path = "../rustc_log" }
rustc_middle = { path = "../rustc_middle" }
rustc_ast_pretty = { path = "../rustc_ast_pretty" }
+6 -7
View File
@@ -30,7 +30,6 @@
use rustc_metadata::locator;
use rustc_save_analysis as save;
use rustc_save_analysis::DumpHandler;
use rustc_serialize::json::ToJson;
use rustc_session::config::{nightly_options, CG_OPTIONS, DB_OPTIONS};
use rustc_session::config::{ErrorOutputType, Input, OutputType, PrintRequest, TrimmedDefPaths};
use rustc_session::cstore::MetadataLoader;
@@ -40,6 +39,7 @@
use rustc_session::{early_error, early_error_no_abort, early_warn};
use rustc_span::source_map::{FileLoader, FileName};
use rustc_span::symbol::sym;
use rustc_target::json::ToJson;
use std::borrow::Cow;
use std::cmp::max;
@@ -343,10 +343,7 @@ fn run_compiler(
return early_exit();
}
if sess.opts.debugging_opts.parse_only
|| sess.opts.debugging_opts.show_span.is_some()
|| sess.opts.debugging_opts.ast_json_noexpand
{
if sess.opts.debugging_opts.parse_only || sess.opts.debugging_opts.show_span.is_some() {
return early_exit();
}
@@ -375,7 +372,7 @@ fn run_compiler(
queries.global_ctxt()?;
if sess.opts.debugging_opts.no_analysis || sess.opts.debugging_opts.ast_json {
if sess.opts.debugging_opts.no_analysis {
return early_exit();
}
@@ -665,7 +662,9 @@ fn print_crate_info(
}
Sysroot => println!("{}", sess.sysroot.display()),
TargetLibdir => println!("{}", sess.target_tlib_path.dir.display()),
TargetSpec => println!("{}", sess.target.to_json().pretty()),
TargetSpec => {
println!("{}", serde_json::to_string_pretty(&sess.target.to_json()).unwrap());
}
FileNames | CrateName => {
let input = input.unwrap_or_else(|| {
early_error(ErrorOutputType::default(), "no input file provided")
+2
View File
@@ -19,6 +19,8 @@ atty = "0.2"
termcolor = "1.0"
annotate-snippets = "0.8.0"
termize = "0.1.1"
serde = { version = "1.0.125", features = ["derive"] }
serde_json = "1.0.59"
[target.'cfg(windows)'.dependencies]
winapi = { version = "0.3", features = ["handleapi", "synchapi", "winbase"] }
+18 -18
View File
@@ -28,7 +28,7 @@
use std::sync::{Arc, Mutex};
use std::vec;
use rustc_serialize::json::{as_json, as_pretty_json};
use serde::Serialize;
#[cfg(test)]
mod tests;
@@ -126,9 +126,9 @@ impl Emitter for JsonEmitter {
fn emit_diagnostic(&mut self, diag: &crate::Diagnostic) {
let data = Diagnostic::from_errors_diagnostic(diag, self);
let result = if self.pretty {
writeln!(&mut self.dst, "{}", as_pretty_json(&data))
writeln!(&mut self.dst, "{}", serde_json::to_string_pretty(&data).unwrap())
} else {
writeln!(&mut self.dst, "{}", as_json(&data))
writeln!(&mut self.dst, "{}", serde_json::to_string(&data).unwrap())
}
.and_then(|_| self.dst.flush());
if let Err(e) = result {
@@ -139,9 +139,9 @@ fn emit_diagnostic(&mut self, diag: &crate::Diagnostic) {
fn emit_artifact_notification(&mut self, path: &Path, artifact_type: &str) {
let data = ArtifactNotification { artifact: path, emit: artifact_type };
let result = if self.pretty {
writeln!(&mut self.dst, "{}", as_pretty_json(&data))
writeln!(&mut self.dst, "{}", serde_json::to_string_pretty(&data).unwrap())
} else {
writeln!(&mut self.dst, "{}", as_json(&data))
writeln!(&mut self.dst, "{}", serde_json::to_string(&data).unwrap())
}
.and_then(|_| self.dst.flush());
if let Err(e) = result {
@@ -161,9 +161,9 @@ fn emit_future_breakage_report(&mut self, diags: Vec<crate::Diagnostic>) {
.collect();
let report = FutureIncompatReport { future_incompat_report: data };
let result = if self.pretty {
writeln!(&mut self.dst, "{}", as_pretty_json(&report))
writeln!(&mut self.dst, "{}", serde_json::to_string_pretty(&report).unwrap())
} else {
writeln!(&mut self.dst, "{}", as_json(&report))
writeln!(&mut self.dst, "{}", serde_json::to_string(&report).unwrap())
}
.and_then(|_| self.dst.flush());
if let Err(e) = result {
@@ -175,9 +175,9 @@ fn emit_unused_externs(&mut self, lint_level: rustc_lint_defs::Level, unused_ext
let lint_level = lint_level.as_str();
let data = UnusedExterns { lint_level, unused_extern_names: unused_externs };
let result = if self.pretty {
writeln!(&mut self.dst, "{}", as_pretty_json(&data))
writeln!(&mut self.dst, "{}", serde_json::to_string_pretty(&data).unwrap())
} else {
writeln!(&mut self.dst, "{}", as_json(&data))
writeln!(&mut self.dst, "{}", serde_json::to_string(&data).unwrap())
}
.and_then(|_| self.dst.flush());
if let Err(e) = result {
@@ -204,7 +204,7 @@ fn should_show_explain(&self) -> bool {
// The following data types are provided just for serialisation.
#[derive(Encodable)]
#[derive(Serialize)]
struct Diagnostic {
/// The primary error message.
message: String,
@@ -218,7 +218,7 @@ struct Diagnostic {
rendered: Option<String>,
}
#[derive(Encodable)]
#[derive(Serialize)]
struct DiagnosticSpan {
file_name: String,
byte_start: u32,
@@ -245,7 +245,7 @@ struct DiagnosticSpan {
expansion: Option<Box<DiagnosticSpanMacroExpansion>>,
}
#[derive(Encodable)]
#[derive(Serialize)]
struct DiagnosticSpanLine {
text: String,
@@ -255,7 +255,7 @@ struct DiagnosticSpanLine {
highlight_end: usize,
}
#[derive(Encodable)]
#[derive(Serialize)]
struct DiagnosticSpanMacroExpansion {
/// span where macro was applied to generate this code; note that
/// this may itself derive from a macro (if
@@ -269,7 +269,7 @@ struct DiagnosticSpanMacroExpansion {
def_site_span: DiagnosticSpan,
}
#[derive(Encodable)]
#[derive(Serialize)]
struct DiagnosticCode {
/// The code itself.
code: String,
@@ -277,7 +277,7 @@ struct DiagnosticCode {
explanation: Option<&'static str>,
}
#[derive(Encodable)]
#[derive(Serialize)]
struct ArtifactNotification<'a> {
/// The path of the artifact.
artifact: &'a Path,
@@ -285,12 +285,12 @@ struct ArtifactNotification<'a> {
emit: &'a str,
}
#[derive(Encodable)]
#[derive(Serialize)]
struct FutureBreakageItem {
diagnostic: Diagnostic,
}
#[derive(Encodable)]
#[derive(Serialize)]
struct FutureIncompatReport {
future_incompat_report: Vec<FutureBreakageItem>,
}
@@ -299,7 +299,7 @@ struct FutureIncompatReport {
// doctest component (as well as cargo).
// We could unify this struct the one in rustdoc but they have different
// ownership semantics, so doing so would create wasteful allocations.
#[derive(Encodable)]
#[derive(Serialize)]
struct UnusedExterns<'a, 'b, 'c> {
/// The severity level of the unused dependencies lint
lint_level: &'a str,
+12 -14
View File
@@ -5,12 +5,18 @@
use crate::emitter::{ColorConfig, HumanReadableErrorType};
use crate::Handler;
use rustc_serialize::json;
use rustc_span::{BytePos, Span};
use std::str;
#[derive(Debug, PartialEq, Eq)]
use serde::Deserialize;
#[derive(Deserialize, Debug, PartialEq, Eq)]
struct TestData {
spans: Vec<SpanTestData>,
}
#[derive(Deserialize, Debug, PartialEq, Eq)]
struct SpanTestData {
pub byte_start: u32,
pub byte_end: u32,
@@ -61,19 +67,11 @@ fn test_positions(code: &str, span: (u32, u32), expected_output: SpanTestData) {
let bytes = output.lock().unwrap();
let actual_output = str::from_utf8(&bytes).unwrap();
let actual_output = json::from_str(&actual_output).unwrap();
let spans = actual_output["spans"].as_array().unwrap();
let actual_output: TestData = serde_json::from_str(actual_output).unwrap();
let spans = actual_output.spans;
assert_eq!(spans.len(), 1);
let obj = &spans[0];
let actual_output = SpanTestData {
byte_start: obj["byte_start"].as_u64().unwrap() as u32,
byte_end: obj["byte_end"].as_u64().unwrap() as u32,
line_start: obj["line_start"].as_u64().unwrap() as u32,
line_end: obj["line_end"].as_u64().unwrap() as u32,
column_start: obj["column_start"].as_u64().unwrap() as u32,
column_end: obj["column_end"].as_u64().unwrap() as u32,
};
assert_eq!(expected_output, actual_output);
assert_eq!(expected_output, spans[0])
})
}
-9
View File
@@ -27,7 +27,6 @@
use rustc_plugin_impl as plugin;
use rustc_query_impl::{OnDiskCache, Queries as TcxQueries};
use rustc_resolve::{Resolver, ResolverArenas};
use rustc_serialize::json;
use rustc_session::config::{CrateType, Input, OutputFilenames, OutputType};
use rustc_session::cstore::{MetadataLoader, MetadataLoaderDyn};
use rustc_session::output::{filename_for_input, filename_for_metadata};
@@ -59,10 +58,6 @@ pub fn parse<'a>(sess: &'a Session, input: &Input) -> PResult<'a, ast::Crate> {
}
})?;
if sess.opts.debugging_opts.ast_json_noexpand {
println!("{}", json::as_json(&krate));
}
if sess.opts.debugging_opts.input_stats {
eprintln!("Lines of code: {}", sess.source_map().count_lines());
eprintln!("Pre-expansion node count: {}", count_nodes(&krate));
@@ -423,10 +418,6 @@ pub fn configure_and_expand(
hir_stats::print_ast_stats(&krate, "POST EXPANSION AST STATS");
}
if sess.opts.debugging_opts.ast_json {
println!("{}", json::as_json(&krate));
}
resolver.resolve_crate(&krate);
// Needs to go *after* expansion to be able to check the results of macro expansion.
-2
View File
@@ -644,8 +644,6 @@ macro_rules! untracked {
// Make sure that changing an [UNTRACKED] option leaves the hash unchanged.
// This list is in alphabetical order.
untracked!(assert_incr_state, Some(String::from("loaded")));
untracked!(ast_json, true);
untracked!(ast_json_noexpand, true);
untracked!(borrowck, String::from("other"));
untracked!(deduplicate_diagnostics, false);
untracked!(dep_tasks, true);
+1
View File
@@ -4,6 +4,7 @@ version = "0.0.0"
edition = "2021"
[dependencies]
serde = { version = "1.0.125", features = ["derive"] }
rustc_ast = { path = "../rustc_ast" }
rustc_data_structures = { path = "../rustc_data_structures" }
rustc_error_messages = { path = "../rustc_error_messages" }
+3 -1
View File
@@ -14,6 +14,8 @@
use rustc_span::{sym, symbol::Ident, Span, Symbol};
use rustc_target::spec::abi::Abi;
use serde::{Deserialize, Serialize};
pub mod builtin;
#[macro_export]
@@ -34,7 +36,7 @@ macro_rules! pluralize {
/// All suggestions are marked with an `Applicability`. Tools use the applicability of a suggestion
/// to determine whether it should be automatically applied or if the user should be consulted
/// before applying the suggestion.
#[derive(Copy, Clone, Debug, PartialEq, Hash, Encodable, Decodable)]
#[derive(Copy, Clone, Debug, PartialEq, Hash, Encodable, Decodable, Serialize, Deserialize)]
pub enum Applicability {
/// The suggestion is definitely what the user intended, or maintains the exact meaning of the code.
/// This suggestion should be automatically applied.
+9 -35
View File
@@ -140,79 +140,56 @@ fn encodable_body(
let encode_body = match s.variants() {
[_] => {
let mut field_idx = 0usize;
let encode_inner = s.each_variant(|vi| {
vi.bindings()
.iter()
.map(|binding| {
let bind_ident = &binding.binding;
let field_name = binding
.ast()
.ident
.as_ref()
.map_or_else(|| field_idx.to_string(), |i| i.to_string());
let first = field_idx == 0;
let result = quote! {
match ::rustc_serialize::Encoder::emit_struct_field(
match ::rustc_serialize::Encodable::<#encoder_ty>::encode(
#bind_ident,
__encoder,
#field_name,
#first,
|__encoder|
::rustc_serialize::Encodable::<#encoder_ty>::encode(#bind_ident, __encoder),
) {
::std::result::Result::Ok(()) => (),
::std::result::Result::Err(__err)
=> return ::std::result::Result::Err(__err),
}
};
field_idx += 1;
result
})
.collect::<TokenStream>()
});
let no_fields = field_idx == 0;
quote! {
::rustc_serialize::Encoder::emit_struct(__encoder, #no_fields, |__encoder| {
::std::result::Result::Ok(match *self { #encode_inner })
})
::std::result::Result::Ok(match *self { #encode_inner })
}
}
_ => {
let mut variant_idx = 0usize;
let encode_inner = s.each_variant(|vi| {
let variant_name = vi.ast().ident.to_string();
let mut field_idx = 0usize;
let encode_fields: TokenStream = vi
.bindings()
.iter()
.map(|binding| {
let bind_ident = &binding.binding;
let first = field_idx == 0;
let result = quote! {
match ::rustc_serialize::Encoder::emit_enum_variant_arg(
match ::rustc_serialize::Encodable::<#encoder_ty>::encode(
#bind_ident,
__encoder,
#first,
|__encoder|
::rustc_serialize::Encodable::<#encoder_ty>::encode(#bind_ident, __encoder),
) {
::std::result::Result::Ok(()) => (),
::std::result::Result::Err(__err)
=> return ::std::result::Result::Err(__err),
}
};
field_idx += 1;
result
})
.collect();
let result = if field_idx != 0 {
let result = if !vi.bindings().is_empty() {
quote! {
::rustc_serialize::Encoder::emit_enum_variant(
__encoder,
#variant_name,
#variant_idx,
#field_idx,
|__encoder| { ::std::result::Result::Ok({ #encode_fields }) }
)
}
@@ -220,7 +197,6 @@ fn encodable_body(
quote! {
::rustc_serialize::Encoder::emit_fieldless_enum_variant::<#variant_idx>(
__encoder,
#variant_name,
)
}
};
@@ -228,11 +204,9 @@ fn encodable_body(
result
});
quote! {
::rustc_serialize::Encoder::emit_enum(__encoder, |__encoder| {
match *self {
#encode_inner
}
})
match *self {
#encode_inner
}
}
}
};
@@ -95,11 +95,6 @@ macro_rules! encoder_methods {
impl<'a, 'tcx> Encoder for EncodeContext<'a, 'tcx> {
type Error = <opaque::Encoder as Encoder>::Error;
#[inline]
fn emit_unit(&mut self) -> Result<(), Self::Error> {
Ok(())
}
encoder_methods! {
emit_usize(usize);
emit_u128(u128);
@@ -315,7 +315,7 @@ pub fn to_bits_or_ptr_internal(
ScalarSizeMismatch { target_size: target_size.bytes(), data_size: size.bytes() }
})?),
Scalar::Ptr(ptr, sz) => {
if target_size.bytes() != sz.into() {
if target_size.bytes() != u64::from(sz) {
return Err(ScalarSizeMismatch {
target_size: target_size.bytes(),
data_size: sz.into(),
@@ -56,8 +56,8 @@ pub(super) fn compute(
impl<S: serialize::Encoder> serialize::Encodable<S> for PredecessorCache {
#[inline]
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_unit()
fn encode(&self, _s: &mut S) -> Result<(), S::Error> {
Ok(())
}
}
@@ -56,8 +56,8 @@ pub(super) fn compute(
impl<S: serialize::Encoder> serialize::Encodable<S> for SwitchSourceCache {
#[inline]
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_unit()
fn encode(&self, _s: &mut S) -> Result<(), S::Error> {
Ok(())
}
}
+2 -2
View File
@@ -367,8 +367,8 @@ pub(super) fn compute(&self, body: &Body<'_>) -> &Vec<BasicBlock> {
impl<S: serialize::Encoder> serialize::Encodable<S> for PostorderCache {
#[inline]
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_unit()
fn encode(&self, _s: &mut S) -> Result<(), S::Error> {
Ok(())
}
}
@@ -993,11 +993,6 @@ impl<'a, 'tcx, E> Encoder for CacheEncoder<'a, 'tcx, E>
{
type Error = E::Error;
#[inline]
fn emit_unit(&mut self) -> Result<(), Self::Error> {
Ok(())
}
encoder_methods! {
emit_usize(usize);
emit_u128(u128);
@@ -25,12 +25,11 @@ fn decode(d: &mut D) -> SmallVec<A> {
impl<S: Encoder, T: Encodable<S>> Encodable<S> for LinkedList<T> {
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_seq(self.len(), |s| {
for (i, e) in self.iter().enumerate() {
s.emit_seq_elt(i, |s| e.encode(s))?;
}
Ok(())
})
s.emit_usize(self.len())?;
for e in self.iter() {
e.encode(s)?;
}
Ok(())
}
}
@@ -43,12 +42,11 @@ fn decode(d: &mut D) -> LinkedList<T> {
impl<S: Encoder, T: Encodable<S>> Encodable<S> for VecDeque<T> {
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_seq(self.len(), |s| {
for (i, e) in self.iter().enumerate() {
s.emit_seq_elt(i, |s| e.encode(s))?;
}
Ok(())
})
s.emit_usize(self.len())?;
for e in self.iter() {
e.encode(s)?;
}
Ok(())
}
}
@@ -65,13 +63,12 @@ impl<S: Encoder, K, V> Encodable<S> for BTreeMap<K, V>
V: Encodable<S>,
{
fn encode(&self, e: &mut S) -> Result<(), S::Error> {
e.emit_map(self.len(), |e| {
for (i, (key, val)) in self.iter().enumerate() {
e.emit_map_elt_key(i, |e| key.encode(e))?;
e.emit_map_elt_val(|e| val.encode(e))?;
}
Ok(())
})
e.emit_usize(self.len())?;
for (key, val) in self.iter() {
key.encode(e)?;
val.encode(e)?;
}
Ok(())
}
}
@@ -97,12 +94,11 @@ impl<S: Encoder, T> Encodable<S> for BTreeSet<T>
T: Encodable<S> + PartialEq + Ord,
{
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_seq(self.len(), |s| {
for (i, e) in self.iter().enumerate() {
s.emit_seq_elt(i, |s| e.encode(s))?;
}
Ok(())
})
s.emit_usize(self.len())?;
for e in self.iter() {
e.encode(s)?;
}
Ok(())
}
}
@@ -127,13 +123,12 @@ impl<E: Encoder, K, V, S> Encodable<E> for HashMap<K, V, S>
S: BuildHasher,
{
fn encode(&self, e: &mut E) -> Result<(), E::Error> {
e.emit_map(self.len(), |e| {
for (i, (key, val)) in self.iter().enumerate() {
e.emit_map_elt_key(i, |e| key.encode(e))?;
e.emit_map_elt_val(|e| val.encode(e))?;
}
Ok(())
})
e.emit_usize(self.len())?;
for (key, val) in self.iter() {
key.encode(e)?;
val.encode(e)?;
}
Ok(())
}
}
@@ -162,12 +157,11 @@ impl<E: Encoder, T, S> Encodable<E> for HashSet<T, S>
S: BuildHasher,
{
fn encode(&self, s: &mut E) -> Result<(), E::Error> {
s.emit_seq(self.len(), |s| {
for (i, e) in self.iter().enumerate() {
s.emit_seq_elt(i, |s| e.encode(s))?;
}
Ok(())
})
s.emit_usize(self.len())?;
for e in self.iter() {
e.encode(s)?;
}
Ok(())
}
}
@@ -194,13 +188,12 @@ impl<E: Encoder, K, V, S> Encodable<E> for indexmap::IndexMap<K, V, S>
S: BuildHasher,
{
fn encode(&self, e: &mut E) -> Result<(), E::Error> {
e.emit_map(self.len(), |e| {
for (i, (key, val)) in self.iter().enumerate() {
e.emit_map_elt_key(i, |e| key.encode(e))?;
e.emit_map_elt_val(|e| val.encode(e))?;
}
Ok(())
})
e.emit_usize(self.len())?;
for (key, val) in self.iter() {
key.encode(e)?;
val.encode(e)?;
}
Ok(())
}
}
@@ -229,12 +222,11 @@ impl<E: Encoder, T, S> Encodable<E> for indexmap::IndexSet<T, S>
S: BuildHasher,
{
fn encode(&self, s: &mut E) -> Result<(), E::Error> {
s.emit_seq(self.len(), |s| {
for (i, e) in self.iter().enumerate() {
s.emit_seq_elt(i, |s| e.encode(s))?;
}
Ok(())
})
s.emit_usize(self.len())?;
for e in self.iter() {
e.encode(s)?;
}
Ok(())
}
}
-2368
View File
@@ -1,2368 +0,0 @@
// Rust JSON serialization library.
// Copyright (c) 2011 Google Inc.
#![forbid(non_camel_case_types)]
#![allow(missing_docs)]
//! JSON parsing and serialization
//!
//! # What is JSON?
//!
//! JSON (JavaScript Object Notation) is a way to write data in Javascript.
//! Like XML, it allows you to encode structured data in a text format that can be easily read by humans.
//! Its simple syntax and native compatibility with JavaScript have made it a widely used format.
//!
//! Data types that can be encoded are JavaScript types (see the `Json` enum for more details):
//!
//! * `Boolean`: equivalent to rust's `bool`
//! * `Number`: equivalent to rust's `f64`
//! * `String`: equivalent to rust's `String`
//! * `Array`: equivalent to rust's `Vec<T>`, but also allowing objects of different types in the
//! same array
//! * `Object`: equivalent to rust's `BTreeMap<String, json::Json>`
//! * `Null`
//!
//! An object is a series of string keys mapping to values, in `"key": value` format.
//! Arrays are enclosed in square brackets ([ ... ]) and objects in curly brackets ({ ... }).
//! A simple JSON document encoding a person, their age, address and phone numbers could look like
//!
//! ```json
//! {
//! "FirstName": "John",
//! "LastName": "Doe",
//! "Age": 43,
//! "Address": {
//! "Street": "Downing Street 10",
//! "City": "London",
//! "Country": "Great Britain"
//! },
//! "PhoneNumbers": [
//! "+44 1234567",
//! "+44 2345678"
//! ]
//! }
//! ```
//!
//! # Rust Type-based Encoding and Decoding
//!
//! To be able to encode a piece of data, it must implement the
//! `serialize::Encodable` trait. The `rustc_macros` crate provides an
//! annotation to automatically generate the code for this trait: `#[derive(Encodable)]`.
//!
//! The JSON API provides an enum `json::Json` and a trait `ToJson` to encode objects.
//! The `ToJson` trait provides a `to_json` method to convert an object into a `json::Json` value.
//! A `json::Json` value can be encoded as a string or buffer using the functions described above.
//! You can also use the `json::Encoder` object, which implements the `Encoder` trait.
//!
//! When using `ToJson` the `Encodable` trait implementation is not mandatory.
//!
//! # Examples of use
//!
//! ## Using Autoserialization
//!
//! Create a struct called `TestStruct` and serialize and deserialize it to and from JSON using the
//! serialization API, using the derived serialization code.
//!
//! ```rust
//! # #![feature(rustc_private)]
//! use rustc_macros::{Encodable};
//! use rustc_serialize::json;
//!
//! // Automatically generate `Encodable` trait implementations
//! #[derive(Encodable)]
//! pub struct TestStruct {
//! data_int: u8,
//! data_str: String,
//! data_vector: Vec<u8>,
//! }
//!
//! let object = TestStruct {
//! data_int: 1,
//! data_str: "homura".to_string(),
//! data_vector: vec![2,3,4,5],
//! };
//!
//! // Serialize using `json::encode`
//! let encoded = json::encode(&object).unwrap();
//! ```
//!
//! ## Using the `ToJson` trait
//!
//! The examples above use the `ToJson` trait to generate the JSON string, which is required
//! for custom mappings.
//!
//! ### Simple example of `ToJson` usage
//!
//! ```rust
//! # #![feature(rustc_private)]
//! use rustc_macros::Encodable;
//! use rustc_serialize::json::{self, ToJson, Json};
//!
//! // A custom data structure
//! struct ComplexNum {
//! a: f64,
//! b: f64,
//! }
//!
//! // JSON value representation
//! impl ToJson for ComplexNum {
//! fn to_json(&self) -> Json {
//! Json::String(format!("{}+{}i", self.a, self.b))
//! }
//! }
//!
//! // Only generate `Encodable` trait implementation
//! #[derive(Encodable)]
//! pub struct ComplexNumRecord {
//! uid: u8,
//! dsc: String,
//! val: Json,
//! }
//!
//! let num = ComplexNum { a: 0.0001, b: 12.539 };
//! let data: String = json::encode(&ComplexNumRecord{
//! uid: 1,
//! dsc: "test".to_string(),
//! val: num.to_json(),
//! }).unwrap();
//! println!("data: {}", data);
//! // data: {"uid":1,"dsc":"test","val":"0.0001+12.539i"};
//! ```
//!
//! ### Verbose example of `ToJson` usage
//!
//! ```rust
//! # #![feature(rustc_private)]
//! use std::collections::BTreeMap;
//! use rustc_serialize::json::{Json, ToJson};
//!
//! pub struct TestStruct {
//! data_int: u8,
//! data_str: String,
//! data_vector: Vec<u8>,
//! }
//!
//! // Specify encoding method manually
//! impl ToJson for TestStruct {
//! fn to_json(&self) -> Json {
//! let mut d = BTreeMap::new();
//! // All standard types implement `to_json()`, so use it
//! d.insert("data_int".to_string(), self.data_int.to_json());
//! d.insert("data_str".to_string(), self.data_str.to_json());
//! d.insert("data_vector".to_string(), self.data_vector.to_json());
//! Json::Object(d)
//! }
//! }
//!
//! // Serialize using `ToJson`
//! let input_data = TestStruct {
//! data_int: 1,
//! data_str: "madoka".to_string(),
//! data_vector: vec![2,3,4,5],
//! };
//! let json_obj: Json = input_data.to_json();
//! let json_str: String = json_obj.to_string();
//! ```
use self::ErrorCode::*;
use self::InternalStackElement::*;
use self::JsonEvent::*;
use self::ParserError::*;
use self::ParserState::*;
use std::borrow::Cow;
use std::collections::{BTreeMap, HashMap};
use std::mem::swap;
use std::num::FpCategory as Fp;
use std::ops::Index;
use std::str::FromStr;
use std::string;
use std::{char, fmt, str};
use crate::Encodable;
/// Represents a json value
///
/// A tree of these variants can model any JSON document: the scalar
/// variants are leaves, while `Array` and `Object` are interior nodes.
#[derive(Clone, PartialEq, PartialOrd, Debug)]
pub enum Json {
    /// A signed 64-bit integer.
    I64(i64),
    /// An unsigned 64-bit integer.
    U64(u64),
    /// A 64-bit floating-point number.
    F64(f64),
    /// A JSON string.
    String(string::String),
    /// `true` or `false`.
    Boolean(bool),
    /// An ordered sequence of values (backed by `Vec<Json>`).
    Array(self::Array),
    /// A key/value mapping (backed by `BTreeMap<String, Json>`).
    Object(self::Object),
    /// The JSON `null` literal.
    Null,
}
/// Backing storage for a JSON array.
pub type Array = Vec<Json>;
/// Backing storage for a JSON object; `BTreeMap` keeps keys in sorted order.
pub type Object = BTreeMap<string::String, Json>;

/// Borrowed wrapper around a `Json` value for pretty-printed output;
/// consumed via `{}` formatting.
pub struct PrettyJson<'a> {
    inner: &'a Json,
}

/// Borrowed wrapper that renders any encodable value as compact JSON;
/// consumed via `{}` formatting.
pub struct AsJson<'a, T> {
    inner: &'a T,
}

/// Like `AsJson`, but pretty-printed.
pub struct AsPrettyJson<'a, T> {
    inner: &'a T,
    // When `Some`, overrides the default indent width -- TODO confirm
    // against the Display impl later in this module.
    indent: Option<usize>,
}
/// The errors that can arise while parsing a JSON stream.
///
/// Human-readable messages for these codes live in `error_str`.
#[derive(Clone, Copy, PartialEq, Debug)]
pub enum ErrorCode {
    InvalidSyntax,
    InvalidNumber,
    EOFWhileParsingObject,
    EOFWhileParsingArray,
    EOFWhileParsingValue,
    EOFWhileParsingString,
    KeyMustBeAString,
    ExpectedColon,
    TrailingCharacters,
    TrailingComma,
    InvalidEscape,
    InvalidUnicodeCodePoint,
    /// A `\uXXXX` escape encoded a leading surrogate with no trailing pair.
    LoneLeadingSurrogateInHexEscape,
    UnexpectedEndOfHexEscape,
    /// A `\u` escape contained a non-hex digit.
    UnrecognizedHex,
    /// A `\u` escape did not contain exactly four digits.
    NotFourDigit,
    NotUtf8,
}
/// A fatal error raised while parsing.
#[derive(Clone, PartialEq, Debug)]
pub enum ParserError {
    /// The error code, plus the line and column where it occurred.
    SyntaxError(ErrorCode, usize, usize),
}

// Builder and Parser have the same errors.
pub type BuilderError = ParserError;
/// Errors that can arise while serializing to JSON.
#[derive(Copy, Clone, Debug)]
pub enum EncoderError {
    /// The underlying `fmt::Write` sink reported a failure.
    FmtError(fmt::Error),
    /// A map key could not be represented as a JSON string
    /// (JSON object keys must be strings).
    BadHashmapKey,
}
/// Returns a readable error string for a given error code.
///
/// These messages are user-facing (they surface through the `Display`
/// impl of `ErrorCode`), so their exact wording is part of observable
/// behavior.
pub fn error_str(error: ErrorCode) -> &'static str {
    match error {
        InvalidSyntax => "invalid syntax",
        InvalidNumber => "invalid number",
        EOFWhileParsingObject => "EOF While parsing object",
        EOFWhileParsingArray => "EOF While parsing array",
        EOFWhileParsingValue => "EOF While parsing value",
        EOFWhileParsingString => "EOF While parsing string",
        KeyMustBeAString => "key must be a string",
        ExpectedColon => "expected `:`",
        TrailingCharacters => "trailing characters",
        TrailingComma => "trailing comma",
        InvalidEscape => "invalid escape",
        UnrecognizedHex => "invalid \\u{ esc}ape (unrecognized hex)",
        NotFourDigit => "invalid \\u{ esc}ape (not four digits)",
        NotUtf8 => "contents not utf-8",
        InvalidUnicodeCodePoint => "invalid Unicode code point",
        LoneLeadingSurrogateInHexEscape => "lone leading surrogate in hex escape",
        UnexpectedEndOfHexEscape => "unexpected end of hex escape",
    }
}
/// Shortcut function to encode a `T` into a JSON `String`.
///
/// Builds a temporary `Encoder` over a fresh buffer, runs the value's
/// `Encodable` impl against it, and returns the accumulated text.
pub fn encode<T: for<'r> crate::Encodable<Encoder<'r>>>(
    object: &T,
) -> Result<string::String, EncoderError> {
    let mut buf = String::new();
    object.encode(&mut Encoder::new(&mut buf))?;
    Ok(buf)
}
impl fmt::Display for ErrorCode {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Delegate to the static message table in `error_str`.
        error_str(*self).fmt(f)
    }
}
impl fmt::Display for ParserError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // FIXME this should be a nicer error; currently it is just the
        // derived `Debug` representation.
        fmt::Debug::fmt(self, f)
    }
}
impl fmt::Display for EncoderError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // FIXME this should be a nicer error; currently it is just the
        // derived `Debug` representation.
        fmt::Debug::fmt(self, f)
    }
}
// Marker impl: the `Display` and derived `Debug` impls above satisfy
// `Error`'s supertraits, and the default methods suffice.
impl std::error::Error for EncoderError {}
impl From<fmt::Error> for EncoderError {
    /// Converts a [`fmt::Error`] into `EncoderError`
    ///
    /// This conversion does not allocate memory. It also enables `?`
    /// on `fmt::Write` calls inside the encoder.
    fn from(err: fmt::Error) -> EncoderError {
        EncoderError::FmtError(err)
    }
}
pub type EncodeResult = Result<(), EncoderError>;
/// Writes `v` to `wr` as a double-quoted JSON string, escaping quotes,
/// backslashes, and control characters.
///
/// Runs of bytes needing no escape are flushed as whole `&str` slices
/// (from `start` up to the next escapable byte) rather than one byte at
/// a time. Slicing on byte indices is safe here because every escaped
/// byte is ASCII (< 0x80); continuation bytes of multi-byte UTF-8
/// sequences always hit the `continue` arm, so slice boundaries land on
/// character boundaries.
fn escape_str(wr: &mut dyn fmt::Write, v: &str) -> EncodeResult {
    wr.write_str("\"")?;

    let mut start = 0;

    for (i, byte) in v.bytes().enumerate() {
        let escaped = match byte {
            b'"' => "\\\"",
            b'\\' => "\\\\",
            b'\x00' => "\\u0000",
            b'\x01' => "\\u0001",
            b'\x02' => "\\u0002",
            b'\x03' => "\\u0003",
            b'\x04' => "\\u0004",
            b'\x05' => "\\u0005",
            b'\x06' => "\\u0006",
            b'\x07' => "\\u0007",
            b'\x08' => "\\b",
            b'\t' => "\\t",
            b'\n' => "\\n",
            b'\x0b' => "\\u000b",
            b'\x0c' => "\\f",
            b'\r' => "\\r",
            b'\x0e' => "\\u000e",
            b'\x0f' => "\\u000f",
            b'\x10' => "\\u0010",
            b'\x11' => "\\u0011",
            b'\x12' => "\\u0012",
            b'\x13' => "\\u0013",
            b'\x14' => "\\u0014",
            b'\x15' => "\\u0015",
            b'\x16' => "\\u0016",
            b'\x17' => "\\u0017",
            b'\x18' => "\\u0018",
            b'\x19' => "\\u0019",
            b'\x1a' => "\\u001a",
            b'\x1b' => "\\u001b",
            b'\x1c' => "\\u001c",
            b'\x1d' => "\\u001d",
            b'\x1e' => "\\u001e",
            b'\x1f' => "\\u001f",
            b'\x7f' => "\\u007f",
            _ => {
                // Ordinary byte: extend the pending unescaped run.
                continue;
            }
        };

        // Flush the unescaped run preceding this byte, then the escape.
        if start < i {
            wr.write_str(&v[start..i])?;
        }

        wr.write_str(escaped)?;

        start = i + 1;
    }

    // Flush any trailing unescaped run.
    if start != v.len() {
        wr.write_str(&v[start..])?;
    }

    wr.write_str("\"")?;
    Ok(())
}
/// Escapes a single character by delegating to `escape_str` on its
/// UTF-8 encoding; the 4-byte stack buffer covers the maximum UTF-8
/// length of a `char`.
fn escape_char(writer: &mut dyn fmt::Write, v: char) -> EncodeResult {
    escape_str(writer, v.encode_utf8(&mut [0; 4]))
}
/// Emits `n` spaces of indentation to `wr`, writing slices of a
/// constant buffer so no temporary `String` is allocated.
fn spaces(wr: &mut dyn fmt::Write, mut n: usize) -> EncodeResult {
    const BUF: &str = " ";
    while n > 0 {
        // Write at most one buffer's worth per iteration.
        let take = n.min(BUF.len());
        wr.write_str(&BUF[..take])?;
        n -= take;
    }
    Ok(())
}
/// Renders an `f64` as JSON text.
///
/// NaN and infinities have no JSON representation, so they become
/// `null`; integral finite values get a trailing `.0` so they read
/// back as floats rather than integers.
fn fmt_number_or_null(v: f64) -> string::String {
    if matches!(v.classify(), Fp::Nan | Fp::Infinite) {
        return string::String::from("null");
    }
    let mut rendered = v.to_string();
    if v.fract() == 0f64 {
        rendered.push_str(".0");
    }
    rendered
}
/// A structure for implementing serialization to JSON.
pub struct Encoder<'a> {
    /// Destination for the serialized text.
    writer: &'a mut (dyn fmt::Write + 'a),
    /// True while a map key is being emitted: numeric emitters quote
    /// their output in that state, and values with no string form
    /// (e.g. `emit_unit`, `emit_bool`) return `BadHashmapKey`.
    is_emitting_map_key: bool,
}
impl<'a> Encoder<'a> {
    /// Creates a new JSON encoder whose output will be written to the writer
    /// specified.
    ///
    /// The encoder starts outside of any map key
    /// (`is_emitting_map_key` is `false`).
    pub fn new(writer: &'a mut dyn fmt::Write) -> Encoder<'a> {
        Encoder { writer, is_emitting_map_key: false }
    }
}
/// Writes `$e` with `write!`; when the encoder is currently emitting a
/// map key, wraps the value in double quotes so it forms a legal JSON
/// object key (JSON keys must be strings). Evaluates to
/// `Ok(())`/`Err(..)`, matching the encoder method return type.
macro_rules! emit_enquoted_if_mapkey {
    ($enc:ident,$e:expr) => {{
        if $enc.is_emitting_map_key {
            write!($enc.writer, "\"{}\"", $e)?;
        } else {
            write!($enc.writer, "{}", $e)?;
        }
        Ok(())
    }};
}
/// Compact (no-whitespace) JSON emission.
///
/// While `is_emitting_map_key` is set, numeric emitters quote their output
/// and every emitter that cannot render as a JSON string fails with
/// `EncoderError::BadHashmapKey`.
impl<'a> crate::Encoder for Encoder<'a> {
    type Error = EncoderError;

    // `()` encodes as JSON `null`.
    fn emit_unit(&mut self) -> EncodeResult {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        write!(self.writer, "null")?;
        Ok(())
    }

    // All integer types are written verbatim (quoted when used as map keys).
    fn emit_usize(&mut self, v: usize) -> EncodeResult {
        emit_enquoted_if_mapkey!(self, v)
    }
    fn emit_u128(&mut self, v: u128) -> EncodeResult {
        emit_enquoted_if_mapkey!(self, v)
    }
    fn emit_u64(&mut self, v: u64) -> EncodeResult {
        emit_enquoted_if_mapkey!(self, v)
    }
    fn emit_u32(&mut self, v: u32) -> EncodeResult {
        emit_enquoted_if_mapkey!(self, v)
    }
    fn emit_u16(&mut self, v: u16) -> EncodeResult {
        emit_enquoted_if_mapkey!(self, v)
    }
    fn emit_u8(&mut self, v: u8) -> EncodeResult {
        emit_enquoted_if_mapkey!(self, v)
    }
    fn emit_isize(&mut self, v: isize) -> EncodeResult {
        emit_enquoted_if_mapkey!(self, v)
    }
    fn emit_i128(&mut self, v: i128) -> EncodeResult {
        emit_enquoted_if_mapkey!(self, v)
    }
    fn emit_i64(&mut self, v: i64) -> EncodeResult {
        emit_enquoted_if_mapkey!(self, v)
    }
    fn emit_i32(&mut self, v: i32) -> EncodeResult {
        emit_enquoted_if_mapkey!(self, v)
    }
    fn emit_i16(&mut self, v: i16) -> EncodeResult {
        emit_enquoted_if_mapkey!(self, v)
    }
    fn emit_i8(&mut self, v: i8) -> EncodeResult {
        emit_enquoted_if_mapkey!(self, v)
    }

    fn emit_bool(&mut self, v: bool) -> EncodeResult {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        if v {
            write!(self.writer, "true")?;
        } else {
            write!(self.writer, "false")?;
        }
        Ok(())
    }

    // Floats go through `fmt_number_or_null`: NaN/Infinity become `null`,
    // integral values get a trailing `.0`.
    fn emit_f64(&mut self, v: f64) -> EncodeResult {
        emit_enquoted_if_mapkey!(self, fmt_number_or_null(v))
    }
    fn emit_f32(&mut self, v: f32) -> EncodeResult {
        self.emit_f64(f64::from(v))
    }

    fn emit_char(&mut self, v: char) -> EncodeResult {
        escape_char(self.writer, v)
    }
    fn emit_str(&mut self, v: &str) -> EncodeResult {
        escape_str(self.writer, v)
    }

    // NOTE(review): each byte is written as a decimal number with no
    // separators between them, which is not round-trippable JSON — presumably
    // acceptable for the current callers; confirm before relying on it.
    fn emit_raw_bytes(&mut self, s: &[u8]) -> Result<(), Self::Error> {
        for &c in s.iter() {
            self.emit_u8(c)?;
        }
        Ok(())
    }

    fn emit_enum<F>(&mut self, f: F) -> EncodeResult
    where
        F: FnOnce(&mut Encoder<'a>) -> EncodeResult,
    {
        f(self)
    }

    fn emit_enum_variant<F>(&mut self, name: &str, _id: usize, cnt: usize, f: F) -> EncodeResult
    where
        F: FnOnce(&mut Encoder<'a>) -> EncodeResult,
    {
        // enums are encoded as strings or objects
        // Bunny => "Bunny"
        // Kangaroo(34,"William") => {"variant": "Kangaroo", "fields": [34,"William"]}
        if cnt == 0 {
            escape_str(self.writer, name)
        } else {
            if self.is_emitting_map_key {
                return Err(EncoderError::BadHashmapKey);
            }
            write!(self.writer, "{{\"variant\":")?;
            escape_str(self.writer, name)?;
            write!(self.writer, ",\"fields\":[")?;
            f(self)?;
            write!(self.writer, "]}}")?;
            Ok(())
        }
    }

    // A variant with no payload encodes as just its name.
    fn emit_fieldless_enum_variant<const ID: usize>(
        &mut self,
        name: &str,
    ) -> Result<(), Self::Error> {
        escape_str(self.writer, name)
    }

    fn emit_enum_variant_arg<F>(&mut self, first: bool, f: F) -> EncodeResult
    where
        F: FnOnce(&mut Encoder<'a>) -> EncodeResult,
    {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        // Comma-separate every field after the first.
        if !first {
            write!(self.writer, ",")?;
        }
        f(self)
    }

    fn emit_struct<F>(&mut self, _: bool, f: F) -> EncodeResult
    where
        F: FnOnce(&mut Encoder<'a>) -> EncodeResult,
    {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        write!(self.writer, "{{")?;
        f(self)?;
        write!(self.writer, "}}")?;
        Ok(())
    }

    fn emit_struct_field<F>(&mut self, name: &str, first: bool, f: F) -> EncodeResult
    where
        F: FnOnce(&mut Encoder<'a>) -> EncodeResult,
    {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        if !first {
            write!(self.writer, ",")?;
        }
        escape_str(self.writer, name)?;
        write!(self.writer, ":")?;
        f(self)
    }

    // Tuples reuse the sequence (JSON array) encoding.
    fn emit_tuple<F>(&mut self, len: usize, f: F) -> EncodeResult
    where
        F: FnOnce(&mut Encoder<'a>) -> EncodeResult,
    {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        self.emit_seq(len, f)
    }
    fn emit_tuple_arg<F>(&mut self, idx: usize, f: F) -> EncodeResult
    where
        F: FnOnce(&mut Encoder<'a>) -> EncodeResult,
    {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        self.emit_seq_elt(idx, f)
    }

    // `Option` is transparent: `None` is `null`, `Some(x)` is `x`.
    fn emit_option<F>(&mut self, f: F) -> EncodeResult
    where
        F: FnOnce(&mut Encoder<'a>) -> EncodeResult,
    {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        f(self)
    }
    fn emit_option_none(&mut self) -> EncodeResult {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        self.emit_unit()
    }
    fn emit_option_some<F>(&mut self, f: F) -> EncodeResult
    where
        F: FnOnce(&mut Encoder<'a>) -> EncodeResult,
    {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        f(self)
    }

    fn emit_seq<F>(&mut self, _len: usize, f: F) -> EncodeResult
    where
        F: FnOnce(&mut Encoder<'a>) -> EncodeResult,
    {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        write!(self.writer, "[")?;
        f(self)?;
        write!(self.writer, "]")?;
        Ok(())
    }
    fn emit_seq_elt<F>(&mut self, idx: usize, f: F) -> EncodeResult
    where
        F: FnOnce(&mut Encoder<'a>) -> EncodeResult,
    {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        if idx != 0 {
            write!(self.writer, ",")?;
        }
        f(self)
    }

    fn emit_map<F>(&mut self, _len: usize, f: F) -> EncodeResult
    where
        F: FnOnce(&mut Encoder<'a>) -> EncodeResult,
    {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        write!(self.writer, "{{")?;
        f(self)?;
        write!(self.writer, "}}")?;
        Ok(())
    }
    fn emit_map_elt_key<F>(&mut self, idx: usize, f: F) -> EncodeResult
    where
        F: FnOnce(&mut Encoder<'a>) -> EncodeResult,
    {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        if idx != 0 {
            write!(self.writer, ",")?
        }
        // Flag that the nested emitter is producing an object key, so it must
        // render as a string; cleared again once the key is written.
        self.is_emitting_map_key = true;
        f(self)?;
        self.is_emitting_map_key = false;
        Ok(())
    }
    fn emit_map_elt_val<F>(&mut self, f: F) -> EncodeResult
    where
        F: FnOnce(&mut Encoder<'a>) -> EncodeResult,
    {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        write!(self.writer, ":")?;
        f(self)
    }
}
/// Another encoder for JSON, but prints out human-readable JSON instead of
/// compact data
pub struct PrettyEncoder<'a> {
    // Destination for the formatted output.
    writer: &'a mut (dyn fmt::Write + 'a),
    // Current indentation in spaces; kept as a multiple of `indent`.
    curr_indent: usize,
    // Spaces added per nesting level (default 2, see `set_indent`).
    indent: usize,
    // Set while a JSON object key is being emitted; keys must render as
    // strings (see the compact `Encoder` for the same mechanism).
    is_emitting_map_key: bool,
}

impl<'a> PrettyEncoder<'a> {
    /// Creates a new encoder whose output will be written to the specified writer
    pub fn new(writer: &'a mut dyn fmt::Write) -> PrettyEncoder<'a> {
        PrettyEncoder { writer, curr_indent: 0, indent: 2, is_emitting_map_key: false }
    }

    /// Sets the number of spaces to indent for each level.
    /// This is safe to set during encoding.
    pub fn set_indent(&mut self, indent: usize) {
        // self.indent very well could be 0 so we need to use checked division.
        let level = self.curr_indent.checked_div(self.indent).unwrap_or(0);
        self.indent = indent;
        // Re-derive the current indentation at the new width so mid-encoding
        // changes stay consistent.
        self.curr_indent = level * self.indent;
    }
}
/// Pretty-printed (indented, multi-line) JSON emission. Mirrors the compact
/// `Encoder` impl, with `curr_indent` bookkeeping around every container.
impl<'a> crate::Encoder for PrettyEncoder<'a> {
    type Error = EncoderError;

    // `()` encodes as JSON `null`.
    fn emit_unit(&mut self) -> EncodeResult {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        write!(self.writer, "null")?;
        Ok(())
    }

    // All integer types are written verbatim (quoted when used as map keys).
    fn emit_usize(&mut self, v: usize) -> EncodeResult {
        emit_enquoted_if_mapkey!(self, v)
    }
    fn emit_u128(&mut self, v: u128) -> EncodeResult {
        emit_enquoted_if_mapkey!(self, v)
    }
    fn emit_u64(&mut self, v: u64) -> EncodeResult {
        emit_enquoted_if_mapkey!(self, v)
    }
    fn emit_u32(&mut self, v: u32) -> EncodeResult {
        emit_enquoted_if_mapkey!(self, v)
    }
    fn emit_u16(&mut self, v: u16) -> EncodeResult {
        emit_enquoted_if_mapkey!(self, v)
    }
    fn emit_u8(&mut self, v: u8) -> EncodeResult {
        emit_enquoted_if_mapkey!(self, v)
    }
    fn emit_isize(&mut self, v: isize) -> EncodeResult {
        emit_enquoted_if_mapkey!(self, v)
    }
    fn emit_i128(&mut self, v: i128) -> EncodeResult {
        emit_enquoted_if_mapkey!(self, v)
    }
    fn emit_i64(&mut self, v: i64) -> EncodeResult {
        emit_enquoted_if_mapkey!(self, v)
    }
    fn emit_i32(&mut self, v: i32) -> EncodeResult {
        emit_enquoted_if_mapkey!(self, v)
    }
    fn emit_i16(&mut self, v: i16) -> EncodeResult {
        emit_enquoted_if_mapkey!(self, v)
    }
    fn emit_i8(&mut self, v: i8) -> EncodeResult {
        emit_enquoted_if_mapkey!(self, v)
    }

    fn emit_bool(&mut self, v: bool) -> EncodeResult {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        if v {
            write!(self.writer, "true")?;
        } else {
            write!(self.writer, "false")?;
        }
        Ok(())
    }

    // Floats go through `fmt_number_or_null`: NaN/Infinity become `null`,
    // integral values get a trailing `.0`.
    fn emit_f64(&mut self, v: f64) -> EncodeResult {
        emit_enquoted_if_mapkey!(self, fmt_number_or_null(v))
    }
    fn emit_f32(&mut self, v: f32) -> EncodeResult {
        self.emit_f64(f64::from(v))
    }

    fn emit_char(&mut self, v: char) -> EncodeResult {
        escape_char(self.writer, v)
    }
    fn emit_str(&mut self, v: &str) -> EncodeResult {
        escape_str(self.writer, v)
    }

    // NOTE(review): each byte is written as a decimal number with no
    // separators between them, which is not round-trippable JSON — presumably
    // acceptable for the current callers; confirm before relying on it.
    fn emit_raw_bytes(&mut self, s: &[u8]) -> Result<(), Self::Error> {
        for &c in s.iter() {
            self.emit_u8(c)?;
        }
        Ok(())
    }

    fn emit_enum<F>(&mut self, f: F) -> EncodeResult
    where
        F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult,
    {
        f(self)
    }

    // Payload-less variants encode as their name; payload-carrying variants
    // as {"variant": <name>, "fields": [...]}, indented one level per nesting.
    fn emit_enum_variant<F>(&mut self, name: &str, _id: usize, cnt: usize, f: F) -> EncodeResult
    where
        F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult,
    {
        if cnt == 0 {
            escape_str(self.writer, name)
        } else {
            if self.is_emitting_map_key {
                return Err(EncoderError::BadHashmapKey);
            }
            writeln!(self.writer, "{{")?;
            self.curr_indent += self.indent;
            spaces(self.writer, self.curr_indent)?;
            write!(self.writer, "\"variant\": ")?;
            escape_str(self.writer, name)?;
            writeln!(self.writer, ",")?;
            spaces(self.writer, self.curr_indent)?;
            writeln!(self.writer, "\"fields\": [")?;
            self.curr_indent += self.indent;
            f(self)?;
            self.curr_indent -= self.indent;
            writeln!(self.writer)?;
            // The closing `]` is written at the "fields" key's indent level;
            // the second decrement restores the outer level for the `}`.
            spaces(self.writer, self.curr_indent)?;
            self.curr_indent -= self.indent;
            writeln!(self.writer, "]")?;
            spaces(self.writer, self.curr_indent)?;
            write!(self.writer, "}}")?;
            Ok(())
        }
    }

    // A variant with no payload encodes as just its name.
    fn emit_fieldless_enum_variant<const ID: usize>(
        &mut self,
        name: &str,
    ) -> Result<(), Self::Error> {
        escape_str(self.writer, name)
    }

    fn emit_enum_variant_arg<F>(&mut self, first: bool, f: F) -> EncodeResult
    where
        F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult,
    {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        if !first {
            writeln!(self.writer, ",")?;
        }
        spaces(self.writer, self.curr_indent)?;
        f(self)
    }

    fn emit_struct<F>(&mut self, no_fields: bool, f: F) -> EncodeResult
    where
        F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult,
    {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        // Empty structs render as `{}` with no internal newline.
        if no_fields {
            write!(self.writer, "{{}}")?;
        } else {
            write!(self.writer, "{{")?;
            self.curr_indent += self.indent;
            f(self)?;
            self.curr_indent -= self.indent;
            writeln!(self.writer)?;
            spaces(self.writer, self.curr_indent)?;
            write!(self.writer, "}}")?;
        }
        Ok(())
    }

    fn emit_struct_field<F>(&mut self, name: &str, first: bool, f: F) -> EncodeResult
    where
        F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult,
    {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        // The first field opens the line after `{`; later fields close the
        // previous line with a comma.
        if first {
            writeln!(self.writer)?;
        } else {
            writeln!(self.writer, ",")?;
        }
        spaces(self.writer, self.curr_indent)?;
        escape_str(self.writer, name)?;
        write!(self.writer, ": ")?;
        f(self)
    }

    // Tuples reuse the sequence (JSON array) encoding.
    fn emit_tuple<F>(&mut self, len: usize, f: F) -> EncodeResult
    where
        F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult,
    {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        self.emit_seq(len, f)
    }
    fn emit_tuple_arg<F>(&mut self, idx: usize, f: F) -> EncodeResult
    where
        F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult,
    {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        self.emit_seq_elt(idx, f)
    }

    // `Option` is transparent: `None` is `null`, `Some(x)` is `x`.
    fn emit_option<F>(&mut self, f: F) -> EncodeResult
    where
        F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult,
    {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        f(self)
    }
    fn emit_option_none(&mut self) -> EncodeResult {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        self.emit_unit()
    }
    fn emit_option_some<F>(&mut self, f: F) -> EncodeResult
    where
        F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult,
    {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        f(self)
    }

    fn emit_seq<F>(&mut self, len: usize, f: F) -> EncodeResult
    where
        F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult,
    {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        // Empty arrays render as `[]` with no internal newline.
        if len == 0 {
            write!(self.writer, "[]")?;
        } else {
            write!(self.writer, "[")?;
            self.curr_indent += self.indent;
            f(self)?;
            self.curr_indent -= self.indent;
            writeln!(self.writer)?;
            spaces(self.writer, self.curr_indent)?;
            write!(self.writer, "]")?;
        }
        Ok(())
    }
    fn emit_seq_elt<F>(&mut self, idx: usize, f: F) -> EncodeResult
    where
        F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult,
    {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        if idx == 0 {
            writeln!(self.writer)?;
        } else {
            writeln!(self.writer, ",")?;
        }
        spaces(self.writer, self.curr_indent)?;
        f(self)
    }

    fn emit_map<F>(&mut self, len: usize, f: F) -> EncodeResult
    where
        F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult,
    {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        // Empty maps render as `{}` with no internal newline.
        if len == 0 {
            write!(self.writer, "{{}}")?;
        } else {
            write!(self.writer, "{{")?;
            self.curr_indent += self.indent;
            f(self)?;
            self.curr_indent -= self.indent;
            writeln!(self.writer)?;
            spaces(self.writer, self.curr_indent)?;
            write!(self.writer, "}}")?;
        }
        Ok(())
    }
    fn emit_map_elt_key<F>(&mut self, idx: usize, f: F) -> EncodeResult
    where
        F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult,
    {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        if idx == 0 {
            writeln!(self.writer)?;
        } else {
            writeln!(self.writer, ",")?;
        }
        spaces(self.writer, self.curr_indent)?;
        // Flag that the nested emitter is producing an object key, so it must
        // render as a string; cleared again once the key is written.
        self.is_emitting_map_key = true;
        f(self)?;
        self.is_emitting_map_key = false;
        Ok(())
    }
    fn emit_map_elt_val<F>(&mut self, f: F) -> EncodeResult
    where
        F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult,
    {
        if self.is_emitting_map_key {
            return Err(EncoderError::BadHashmapKey);
        }
        write!(self.writer, ": ")?;
        f(self)
    }
}
impl<E: crate::Encoder> Encodable<E> for Json {
    /// Serializes this JSON value by forwarding each variant to the
    /// corresponding emitter of `e`; `Null` maps to the unit emitter.
    fn encode(&self, e: &mut E) -> Result<(), E::Error> {
        match self {
            Json::Null => e.emit_unit(),
            Json::Boolean(b) => b.encode(e),
            Json::I64(n) => n.encode(e),
            Json::U64(n) => n.encode(e),
            Json::F64(n) => n.encode(e),
            Json::String(s) => s.encode(e),
            Json::Array(a) => a.encode(e),
            Json::Object(o) => o.encode(e),
        }
    }
}
/// Creates an `AsJson` wrapper which can be used to print a value as JSON
/// on-the-fly via `write!`
///
/// The wrapper borrows `t`; serialization happens when the wrapper is
/// formatted, not when it is created.
pub fn as_json<T>(t: &T) -> AsJson<'_, T> {
    AsJson { inner: t }
}
/// Creates an `AsPrettyJson` wrapper which can be used to print a value as JSON
/// on-the-fly via `write!`
///
/// Like `as_json`, but formats with the pretty (indented) encoder; the
/// indent width is left at the encoder's default until explicitly set.
pub fn as_pretty_json<T>(t: &T) -> AsPrettyJson<'_, T> {
    AsPrettyJson { inner: t, indent: None }
}
impl Json {
    /// Borrows this value wrapped for pretty-printed `Display` formatting.
    pub fn pretty(&self) -> PrettyJson<'_> {
        PrettyJson { inner: self }
    }

    /// If the Json value is an Object, returns the value associated with the
    /// provided key. Otherwise, returns None.
    pub fn find(&self, key: &str) -> Option<&Json> {
        if let Json::Object(map) = self { map.get(key) } else { None }
    }

    /// If the Json value is an Object, deletes the value associated with the
    /// provided key from the Object and returns it. Otherwise, returns None.
    pub fn remove_key(&mut self, key: &str) -> Option<Json> {
        if let Json::Object(map) = self { map.remove(key) } else { None }
    }

    /// Attempts to get a nested Json Object for each key in `keys`,
    /// returning the value associated with the final key; `None` as soon as
    /// any lookup fails.
    pub fn find_path<'a>(&'a self, keys: &[&str]) -> Option<&'a Json> {
        keys.iter().try_fold(self, |value, key| value.find(*key))
    }

    /// If the Json value is an Object, performs a depth-first search until
    /// a value associated with the provided key is found. If no value is found
    /// or the Json value is not an Object, returns `None`.
    pub fn search(&self, key: &str) -> Option<&Json> {
        match self {
            // Direct hit on this object, otherwise recurse into its values.
            Json::Object(map) => {
                map.get(key).or_else(|| map.values().find_map(|v| v.search(key)))
            }
            _ => None,
        }
    }

    /// Returns `true` if the Json value is an `Object`.
    pub fn is_object(&self) -> bool {
        self.as_object().is_some()
    }

    /// If the Json value is an `Object`, returns the associated `BTreeMap`;
    /// returns `None` otherwise.
    pub fn as_object(&self) -> Option<&Object> {
        if let Json::Object(map) = self { Some(map) } else { None }
    }

    /// Returns `true` if the Json value is an `Array`.
    pub fn is_array(&self) -> bool {
        self.as_array().is_some()
    }

    /// If the Json value is an `Array`, returns the associated vector;
    /// returns `None` otherwise.
    pub fn as_array(&self) -> Option<&Array> {
        if let Json::Array(array) = self { Some(array) } else { None }
    }

    /// Returns `true` if the Json value is a `String`.
    pub fn is_string(&self) -> bool {
        self.as_string().is_some()
    }

    /// If the Json value is a `String`, returns the associated `str`;
    /// returns `None` otherwise.
    pub fn as_string(&self) -> Option<&str> {
        if let Json::String(s) = self { Some(s.as_str()) } else { None }
    }

    /// Returns `true` if the Json value is a `Number`.
    pub fn is_number(&self) -> bool {
        matches!(self, Json::I64(_) | Json::U64(_) | Json::F64(_))
    }

    /// Returns `true` if the Json value is an `i64`.
    pub fn is_i64(&self) -> bool {
        matches!(self, Json::I64(_))
    }

    /// Returns `true` if the Json value is a `u64`.
    pub fn is_u64(&self) -> bool {
        matches!(self, Json::U64(_))
    }

    /// Returns `true` if the Json value is a `f64`.
    pub fn is_f64(&self) -> bool {
        matches!(self, Json::F64(_))
    }

    /// If the Json value is a number, returns or cast it to an `i64`;
    /// returns `None` otherwise (floats are not converted).
    pub fn as_i64(&self) -> Option<i64> {
        match self {
            Json::I64(n) => Some(*n),
            Json::U64(n) => Some(*n as i64),
            _ => None,
        }
    }

    /// If the Json value is a number, returns or cast it to a `u64`;
    /// returns `None` otherwise (floats are not converted).
    pub fn as_u64(&self) -> Option<u64> {
        match self {
            Json::I64(n) => Some(*n as u64),
            Json::U64(n) => Some(*n),
            _ => None,
        }
    }

    /// If the Json value is a number, returns or cast it to a `f64`;
    /// returns `None` otherwise.
    pub fn as_f64(&self) -> Option<f64> {
        match self {
            Json::I64(n) => Some(*n as f64),
            Json::U64(n) => Some(*n as f64),
            Json::F64(n) => Some(*n),
            _ => None,
        }
    }

    /// Returns `true` if the Json value is a `Boolean`.
    pub fn is_boolean(&self) -> bool {
        self.as_boolean().is_some()
    }

    /// If the Json value is a `Boolean`, returns the associated `bool`;
    /// returns `None` otherwise.
    pub fn as_boolean(&self) -> Option<bool> {
        match self {
            Json::Boolean(b) => Some(*b),
            _ => None,
        }
    }

    /// Returns `true` if the Json value is a `Null`.
    pub fn is_null(&self) -> bool {
        self.as_null().is_some()
    }

    /// If the Json value is a `Null`, returns `()`;
    /// returns `None` otherwise.
    pub fn as_null(&self) -> Option<()> {
        if let Json::Null = self { Some(()) } else { None }
    }
}
impl<'a> Index<&'a str> for Json {
    type Output = Json;

    // Indexes into a JSON object by key.
    //
    // Panics if `self` is not an object or the key is absent (i.e. whenever
    // `find` returns `None`); use `find` for a non-panicking lookup.
    fn index(&self, idx: &'a str) -> &Json {
        self.find(idx).unwrap()
    }
}
impl Index<usize> for Json {
    type Output = Json;

    // Indexes into a JSON array by position.
    //
    // Panics if `self` is not an array, or if `idx` is out of bounds.
    fn index(&self, idx: usize) -> &Json {
        if let Json::Array(v) = self {
            &v[idx]
        } else {
            panic!("can only index Json with usize if it is an array")
        }
    }
}
/// The output of the streaming parser.
#[derive(PartialEq, Clone, Debug)]
pub enum JsonEvent {
    // `{` was consumed; object members follow until `ObjectEnd`.
    ObjectStart,
    ObjectEnd,
    // `[` was consumed; array elements follow until `ArrayEnd`.
    ArrayStart,
    ArrayEnd,
    BooleanValue(bool),
    // Negative integers (see `parse_number`).
    I64Value(i64),
    // Non-negative integers.
    U64Value(u64),
    // Numbers with a fraction or exponent.
    F64Value(f64),
    StringValue(string::String),
    NullValue,
    // Terminal event: parsing cannot continue after an error.
    Error(ParserError),
}
// The streaming parser's state machine; see `Parser::parse` for transitions.
#[derive(PartialEq, Debug)]
enum ParserState {
    // Parse a value in an array, true means first element.
    ParseArray(bool),
    // Parse ',' or ']' after an element in an array.
    ParseArrayComma,
    // Parse a key:value in an object, true means first element.
    ParseObject(bool),
    // Parse ',' or '}' after an element in an object.
    ParseObjectComma,
    // Initial state.
    ParseStart,
    // Expecting the stream to end.
    ParseBeforeFinish,
    // Parsing can't continue.
    ParseFinished,
}
/// A Stack represents the current position of the parser in the logical
/// structure of the JSON stream.
///
/// An example is `foo.bar[3].x`.
#[derive(Default)]
pub struct Stack {
    // One entry per nesting level (object key or array index).
    stack: Vec<InternalStackElement>,
    // Backing storage for all key strings; entries reference ranges of this
    // buffer instead of owning their own allocations.
    str_buffer: Vec<u8>,
}
/// StackElements compose a Stack.
///
/// As an example, `StackElement::Key("foo")`, `StackElement::Key("bar")`,
/// `StackElement::Index(3)`, and `StackElement::Key("x")` are the
/// StackElements composing the stack that represents `foo.bar[3].x`.
#[derive(PartialEq, Clone, Debug)]
pub enum StackElement<'l> {
    // Position within an array.
    Index(u32),
    // Key within an object; borrows from the Stack's internal buffer.
    Key(&'l str),
}
// Internally, Key elements are stored as indices in a buffer to avoid
// allocating a string for every member of an object.
#[derive(PartialEq, Clone, Debug)]
enum InternalStackElement {
    InternalIndex(u32),
    // NOTE(review): start/size are u16, so the combined length of all keys on
    // the stack is capped at 64 KiB — presumably fine for the inputs this
    // parser sees; deeper keys would silently truncate the stored range.
    InternalKey(u16, u16), // start, size
}
impl Stack {
    pub fn new() -> Stack {
        Self::default()
    }

    /// Number of elements currently on the stack.
    pub fn len(&self) -> usize {
        self.stack.len()
    }

    /// Returns `true` if the stack holds no elements.
    pub fn is_empty(&self) -> bool {
        self.stack.is_empty()
    }

    /// Returns the element at `idx`; lower indices are at the bottom of the
    /// stack, higher indices at the top. Panics if `idx` is out of bounds.
    pub fn get(&self, idx: usize) -> StackElement<'_> {
        match self.stack[idx] {
            InternalIndex(i) => StackElement::Index(i),
            InternalKey(start, size) => {
                // Keys are stored as (start, len) ranges into `str_buffer`.
                let (start, size) = (start as usize, size as usize);
                StackElement::Key(str::from_utf8(&self.str_buffer[start..start + size]).unwrap())
            }
        }
    }

    /// Returns `true` if this stack is element-for-element equal to `rhs`.
    pub fn is_equal_to(&self, rhs: &[StackElement<'_>]) -> bool {
        self.stack.len() == rhs.len() && rhs.iter().enumerate().all(|(i, r)| self.get(i) == *r)
    }

    /// Returns `true` if the bottom-most elements of this stack are the same
    /// as the ones passed as parameter.
    pub fn starts_with(&self, rhs: &[StackElement<'_>]) -> bool {
        self.stack.len() >= rhs.len() && rhs.iter().enumerate().all(|(i, r)| self.get(i) == *r)
    }

    /// Returns `true` if the top-most elements of this stack are the same as
    /// the ones passed as parameter.
    pub fn ends_with(&self, rhs: &[StackElement<'_>]) -> bool {
        if self.stack.len() < rhs.len() {
            return false;
        }
        let offset = self.stack.len() - rhs.len();
        rhs.iter().enumerate().all(|(i, r)| self.get(i + offset) == *r)
    }

    /// Returns the top-most element (if any).
    pub fn top(&self) -> Option<StackElement<'_>> {
        self.stack.last().map(|elem| match *elem {
            InternalIndex(i) => StackElement::Index(i),
            InternalKey(start, size) => StackElement::Key(
                str::from_utf8(&self.str_buffer[start as usize..(start + size) as usize]).unwrap(),
            ),
        })
    }

    // Used by Parser to insert StackElement::Key elements at the top of the
    // stack; the key's bytes are appended to the shared buffer.
    fn push_key(&mut self, key: string::String) {
        self.stack.push(InternalKey(self.str_buffer.len() as u16, key.len() as u16));
        self.str_buffer.extend(key.as_bytes());
    }

    // Used by Parser to insert StackElement::Index elements at the top of the stack.
    fn push_index(&mut self, index: u32) {
        self.stack.push(InternalIndex(index));
    }

    // Used by Parser to remove the top-most element of the stack, reclaiming
    // the key's bytes from the buffer when applicable.
    fn pop(&mut self) {
        assert!(!self.is_empty());
        if let InternalKey(_, sz) = *self.stack.last().unwrap() {
            let new_size = self.str_buffer.len() - sz as usize;
            self.str_buffer.truncate(new_size);
        }
        self.stack.pop();
    }

    // Used by Parser to test whether the top-most element is an index.
    fn last_is_index(&self) -> bool {
        matches!(self.stack.last(), Some(InternalIndex(_)))
    }

    // Used by Parser to increment the index of the top-most element; panics
    // if the stack is empty or its top is not an index.
    fn bump_index(&mut self) {
        let bumped = match *self.stack.last().unwrap() {
            InternalIndex(i) => i + 1,
            _ => panic!(),
        };
        *self.stack.last_mut().unwrap() = InternalIndex(bumped);
    }
}
/// A streaming JSON parser implemented as an iterator of JsonEvent, consuming
/// an iterator of char.
pub struct Parser<T> {
    // Source of characters.
    rdr: T,
    // Current character (the parser is always one character ahead);
    // `None` at end of input.
    ch: Option<char>,
    // 1-based position, used in syntax errors.
    line: usize,
    col: usize,
    // We maintain a stack representing where we are in the logical structure
    // of the JSON stream.
    stack: Stack,
    // A state machine is kept to make it possible to interrupt and resume parsing.
    state: ParserState,
}
impl<T: Iterator<Item = char>> Iterator for Parser<T> {
    type Item = JsonEvent;

    /// Pulls the next event out of the parser's state machine.
    fn next(&mut self) -> Option<JsonEvent> {
        match self.state {
            // Nothing more to produce once parsing has finished or errored.
            ParseFinished => None,
            // The top-level value is complete: only whitespace may remain.
            ParseBeforeFinish => {
                self.parse_whitespace();
                if self.eof() {
                    self.state = ParseFinished;
                    None
                } else {
                    Some(self.error_event(TrailingCharacters))
                }
            }
            _ => Some(self.parse()),
        }
    }
}
impl<T: Iterator<Item = char>> Parser<T> {
/// Creates the JSON parser.
pub fn new(rdr: T) -> Parser<T> {
let mut p = Parser {
rdr,
ch: Some('\x00'),
line: 1,
col: 0,
stack: Stack::new(),
state: ParseStart,
};
p.bump();
p
}
    /// Provides access to the current position in the logical structure of the
    /// JSON stream.
    pub fn stack(&self) -> &Stack {
        &self.stack
    }

    // True once the underlying character stream is exhausted.
    fn eof(&self) -> bool {
        self.ch.is_none()
    }

    // Current character, or NUL at end of input (NUL never matches a valid
    // JSON token, so it safely falls through to the error arms).
    fn ch_or_null(&self) -> char {
        self.ch.unwrap_or('\x00')
    }

    // Advances to the next character, tracking line/column for error reports.
    fn bump(&mut self) {
        self.ch = self.rdr.next();
        if self.ch_is('\n') {
            self.line += 1;
            self.col = 1;
        } else {
            self.col += 1;
        }
    }

    // Advances and returns the new current character.
    fn next_char(&mut self) -> Option<char> {
        self.bump();
        self.ch
    }

    // Whether the current character is exactly `c`.
    fn ch_is(&self, c: char) -> bool {
        self.ch == Some(c)
    }

    // Builds a syntax error carrying the current source position.
    fn error<U>(&self, reason: ErrorCode) -> Result<U, ParserError> {
        Err(SyntaxError(reason, self.line, self.col))
    }

    // Skips the JSON whitespace characters: space, '\n', '\t', '\r'.
    fn parse_whitespace(&mut self) {
        while self.ch_is(' ') || self.ch_is('\n') || self.ch_is('\t') || self.ch_is('\r') {
            self.bump();
        }
    }
    // Parses a JSON number: optional '-' sign, integer part, then an optional
    // fraction and/or exponent. A fraction or exponent forces an f64 result;
    // otherwise the value stays integral (I64 when negative, U64 otherwise).
    fn parse_number(&mut self) -> JsonEvent {
        let neg = if self.ch_is('-') {
            self.bump();
            true
        } else {
            false
        };
        let res = match self.parse_u64() {
            Ok(res) => res,
            Err(e) => {
                return Error(e);
            }
        };
        if self.ch_is('.') || self.ch_is('e') || self.ch_is('E') {
            // Fraction and/or exponent present: promote to floating point.
            let mut res = res as f64;
            if self.ch_is('.') {
                res = match self.parse_decimal(res) {
                    Ok(res) => res,
                    Err(e) => {
                        return Error(e);
                    }
                };
            }
            if self.ch_is('e') || self.ch_is('E') {
                res = match self.parse_exponent(res) {
                    Ok(res) => res,
                    Err(e) => {
                        return Error(e);
                    }
                };
            }
            if neg {
                res *= -1.0;
            }
            F64Value(res)
        } else if neg {
            // Negate via two's-complement wrap; a positive result means the
            // magnitude did not fit in i64 (i64::MIN is the one legal wrap).
            let res = (res as i64).wrapping_neg();
            // Make sure we didn't underflow.
            if res > 0 {
                Error(SyntaxError(InvalidNumber, self.line, self.col))
            } else {
                I64Value(res)
            }
        } else {
            U64Value(res)
        }
    }
fn parse_u64(&mut self) -> Result<u64, ParserError> {
let mut accum = 0u64;
let last_accum = 0; // necessary to detect overflow.
match self.ch_or_null() {
'0' => {
self.bump();
// A leading '0' must be the only digit before the decimal point.
if let '0'..='9' = self.ch_or_null() {
return self.error(InvalidNumber);
}
}
'1'..='9' => {
while !self.eof() {
match self.ch_or_null() {
c @ '0'..='9' => {
accum = accum.wrapping_mul(10);
accum = accum.wrapping_add((c as u64) - ('0' as u64));
// Detect overflow by comparing to the last value.
if accum <= last_accum {
return self.error(InvalidNumber);
}
self.bump();
}
_ => break,
}
}
}
_ => return self.error(InvalidNumber),
}
Ok(accum)
}
    // Parses the digits after a decimal point, accumulating them onto `res`
    // (the already-parsed integer part).
    fn parse_decimal(&mut self, mut res: f64) -> Result<f64, ParserError> {
        self.bump();
        // Make sure a digit follows the decimal place.
        match self.ch_or_null() {
            '0'..='9' => (),
            _ => return self.error(InvalidNumber),
        }
        let mut dec = 1.0;
        while !self.eof() {
            match self.ch_or_null() {
                c @ '0'..='9' => {
                    // Each successive digit weighs one decimal place less.
                    dec /= 10.0;
                    res += (((c as isize) - ('0' as isize)) as f64) * dec;
                    self.bump();
                }
                _ => break,
            }
        }
        Ok(res)
    }
    // Parses an exponent suffix ('e'/'E' already seen) and scales `res` by
    // the resulting power of ten.
    fn parse_exponent(&mut self, mut res: f64) -> Result<f64, ParserError> {
        self.bump();
        let mut exp = 0;
        let mut neg_exp = false;
        if self.ch_is('+') {
            self.bump();
        } else if self.ch_is('-') {
            self.bump();
            neg_exp = true;
        }
        // Make sure a digit follows the exponent place.
        match self.ch_or_null() {
            '0'..='9' => (),
            _ => return self.error(InvalidNumber),
        }
        // NOTE(review): `exp` (usize) can overflow on an absurdly long
        // exponent literal (panic in debug, wrap in release) — presumably
        // inputs are bounded; confirm if untrusted input can reach this.
        while !self.eof() {
            match self.ch_or_null() {
                c @ '0'..='9' => {
                    exp *= 10;
                    exp += (c as usize) - ('0' as usize);
                    self.bump();
                }
                _ => break,
            }
        }
        // Scale by 10^exp; out-of-range exponents saturate to inf/0 via f64.
        let exp = 10_f64.powi(exp as i32);
        if neg_exp {
            res /= exp;
        } else {
            res *= exp;
        }
        Ok(res)
    }
fn decode_hex_escape(&mut self) -> Result<u16, ParserError> {
let mut i = 0;
let mut n = 0;
while i < 4 && !self.eof() {
self.bump();
n = match self.ch_or_null() {
c @ '0'..='9' => n * 16 + ((c as u16) - ('0' as u16)),
'a' | 'A' => n * 16 + 10,
'b' | 'B' => n * 16 + 11,
'c' | 'C' => n * 16 + 12,
'd' | 'D' => n * 16 + 13,
'e' | 'E' => n * 16 + 14,
'f' | 'F' => n * 16 + 15,
_ => return self.error(InvalidEscape),
};
i += 1;
}
// Error out if we didn't parse 4 digits.
if i != 4 {
return self.error(InvalidEscape);
}
Ok(n)
}
    // Parses a JSON string literal (opening '"' already current), handling
    // backslash escapes and UTF-16 surrogate pairs in \u escapes. Consumes
    // the closing quote before returning.
    fn parse_str(&mut self) -> Result<string::String, ParserError> {
        // Set when the previous character was an unprocessed '\'.
        let mut escape = false;
        let mut res = string::String::new();
        loop {
            self.bump();
            if self.eof() {
                return self.error(EOFWhileParsingString);
            }
            if escape {
                match self.ch_or_null() {
                    '"' => res.push('"'),
                    '\\' => res.push('\\'),
                    '/' => res.push('/'),
                    'b' => res.push('\x08'),
                    'f' => res.push('\x0c'),
                    'n' => res.push('\n'),
                    'r' => res.push('\r'),
                    't' => res.push('\t'),
                    'u' => match self.decode_hex_escape()? {
                        // A trailing (low) surrogate with no leading one.
                        0xDC00..=0xDFFF => return self.error(LoneLeadingSurrogateInHexEscape),
                        // Non-BMP characters are encoded as a sequence of
                        // two hex escapes, representing UTF-16 surrogates.
                        n1 @ 0xD800..=0xDBFF => {
                            // The pair's second half must follow as "\uXXXX".
                            match (self.next_char(), self.next_char()) {
                                (Some('\\'), Some('u')) => (),
                                _ => return self.error(UnexpectedEndOfHexEscape),
                            }
                            let n2 = self.decode_hex_escape()?;
                            if !(0xDC00..=0xDFFF).contains(&n2) {
                                return self.error(LoneLeadingSurrogateInHexEscape);
                            }
                            // Combine the surrogate halves into a code point.
                            let c =
                                (u32::from(n1 - 0xD800) << 10 | u32::from(n2 - 0xDC00)) + 0x1_0000;
                            res.push(char::from_u32(c).unwrap());
                        }
                        n => match char::from_u32(u32::from(n)) {
                            Some(c) => res.push(c),
                            None => return self.error(InvalidUnicodeCodePoint),
                        },
                    },
                    _ => return self.error(InvalidEscape),
                }
                escape = false;
            } else if self.ch_is('\\') {
                escape = true;
            } else {
                match self.ch {
                    Some('"') => {
                        self.bump();
                        return Ok(res);
                    }
                    Some(c) => res.push(c),
                    // eof() was checked at the top of the loop.
                    None => unreachable!(),
                }
            }
        }
    }
    // Invoked at each iteration, consumes the stream until it has enough
    // information to return a JsonEvent.
    // Manages an internal state so that parsing can be interrupted and resumed.
    // Also keeps track of the position in the logical structure of the json
    // stream isize the form of a stack that can be queried by the user using the
    // stack() method.
    fn parse(&mut self) -> JsonEvent {
        loop {
            // The only paths where the loop can spin a new iteration
            // are in the cases ParseArrayComma and ParseObjectComma if ','
            // is parsed. In these cases the state is set to (respectively)
            // ParseArray(false) and ParseObject(false), which always return,
            // so there is no risk of getting stuck in an infinite loop.
            // All other paths return before the end of the loop's iteration.
            self.parse_whitespace();
            match self.state {
                ParseStart => {
                    return self.parse_start();
                }
                ParseArray(first) => {
                    return self.parse_array(first);
                }
                ParseArrayComma => {
                    // Returns None when a ',' was consumed, looping to parse
                    // the next element.
                    if let Some(evt) = self.parse_array_comma_or_end() {
                        return evt;
                    }
                }
                ParseObject(first) => {
                    return self.parse_object(first);
                }
                ParseObjectComma => {
                    // The previous member's key is no longer needed.
                    self.stack.pop();
                    if self.ch_is(',') {
                        self.state = ParseObject(false);
                        self.bump();
                    } else {
                        return self.parse_object_end();
                    }
                }
                // ParseFinished / ParseBeforeFinish are handled by `next`;
                // reaching here means the state machine is out of sync.
                _ => {
                    return self.error_event(InvalidSyntax);
                }
            }
        }
    }
fn parse_start(&mut self) -> JsonEvent {
let val = self.parse_value();
self.state = match val {
Error(_) => ParseFinished,
ArrayStart => ParseArray(true),
ObjectStart => ParseObject(true),
_ => ParseBeforeFinish,
};
val
}
    // Parses the next array element (or the end of the array). `first` is
    // true when no element has been consumed yet.
    fn parse_array(&mut self, first: bool) -> JsonEvent {
        if self.ch_is(']') {
            if !first {
                // A ']' right after ',' is a trailing comma.
                self.error_event(InvalidSyntax)
            } else {
                // Empty array: restore the state of the enclosing container
                // (or expect end-of-input at the top level).
                self.state = if self.stack.is_empty() {
                    ParseBeforeFinish
                } else if self.stack.last_is_index() {
                    ParseArrayComma
                } else {
                    ParseObjectComma
                };
                self.bump();
                ArrayEnd
            }
        } else {
            if first {
                // Track our position: element 0 of this array.
                self.stack.push_index(0);
            }
            let val = self.parse_value();
            self.state = match val {
                Error(_) => ParseFinished,
                ArrayStart => ParseArray(true),
                ObjectStart => ParseObject(true),
                _ => ParseArrayComma,
            };
            val
        }
    }
/// After an array element: expects `,` (continue), `]` (close) or EOF.
/// Returns `None` when parsing should continue with the next element.
fn parse_array_comma_or_end(&mut self) -> Option<JsonEvent> {
    if self.ch_is(',') {
        self.stack.bump_index();
        self.state = ParseArray(false);
        self.bump();
        return None;
    }
    if self.ch_is(']') {
        self.stack.pop();
        self.state = if self.stack.is_empty() {
            ParseBeforeFinish
        } else if self.stack.last_is_index() {
            ParseArrayComma
        } else {
            ParseObjectComma
        };
        self.bump();
        return Some(ArrayEnd);
    }
    // Anything else is malformed; distinguish EOF for a better error code.
    let code = if self.eof() { EOFWhileParsingArray } else { InvalidSyntax };
    Some(self.error_event(code))
}
// Parses the next `"key": value` member of an object (or its closing `}`).
// `first` is true when no member has been consumed yet, which is how an
// empty object is told apart from a trailing comma.
fn parse_object(&mut self, first: bool) -> JsonEvent {
if self.ch_is('}') {
if !first {
// `}` after a comma: either a trailing comma error, or we still
// owe the stack a pop for the member just finished.
if self.stack.is_empty() {
return self.error_event(TrailingComma);
} else {
self.stack.pop();
}
}
// Object closed: next state depends on the enclosing container.
self.state = if self.stack.is_empty() {
ParseBeforeFinish
} else if self.stack.last_is_index() {
ParseArrayComma
} else {
ParseObjectComma
};
self.bump();
return ObjectEnd;
}
if self.eof() {
return self.error_event(EOFWhileParsingObject);
}
// JSON object keys must be double-quoted strings.
if !self.ch_is('"') {
return self.error_event(KeyMustBeAString);
}
let s = match self.parse_str() {
Ok(s) => s,
Err(e) => {
self.state = ParseFinished;
return Error(e);
}
};
self.parse_whitespace();
if self.eof() {
return self.error_event(EOFWhileParsingObject);
} else if self.ch_or_null() != ':' {
return self.error_event(ExpectedColon);
}
// Expose the key via the logical stack while its value is parsed.
self.stack.push_key(s);
self.bump();
self.parse_whitespace();
let val = self.parse_value();
self.state = match val {
Error(_) => ParseFinished,
ArrayStart => ParseArray(true),
ObjectStart => ParseObject(true),
_ => ParseObjectComma,
};
val
}
/// Expects the `}` that closes an object (reached when no `,` followed the
/// previous key/value pair).
fn parse_object_end(&mut self) -> JsonEvent {
    if self.ch_is('}') {
        self.state = if self.stack.is_empty() {
            ParseBeforeFinish
        } else if self.stack.last_is_index() {
            ParseArrayComma
        } else {
            ParseObjectComma
        };
        self.bump();
        return ObjectEnd;
    }
    // Anything but `}` is malformed; report EOF specially.
    let code = if self.eof() { EOFWhileParsingObject } else { InvalidSyntax };
    self.error_event(code)
}
/// Dispatches on the current character to produce the event for a single
/// JSON value (scalar, or the opening event of a container).
fn parse_value(&mut self) -> JsonEvent {
    if self.eof() {
        return self.error_event(EOFWhileParsingValue);
    }
    let ch = self.ch_or_null();
    match ch {
        '[' => {
            self.bump();
            ArrayStart
        }
        '{' => {
            self.bump();
            ObjectStart
        }
        '"' => match self.parse_str() {
            Ok(s) => StringValue(s),
            Err(e) => Error(e),
        },
        '-' | '0'..='9' => self.parse_number(),
        // Keywords: the first character selects which tail to expect.
        'n' => self.parse_ident("ull", NullValue),
        't' => self.parse_ident("rue", BooleanValue(true)),
        'f' => self.parse_ident("alse", BooleanValue(false)),
        _ => self.error_event(InvalidSyntax),
    }
}
fn parse_ident(&mut self, ident: &str, value: JsonEvent) -> JsonEvent {
if ident.chars().all(|c| Some(c) == self.next_char()) {
self.bump();
value
} else {
Error(SyntaxError(InvalidSyntax, self.line, self.col))
}
}
fn error_event(&mut self, reason: ErrorCode) -> JsonEvent {
self.state = ParseFinished;
Error(SyntaxError(reason, self.line, self.col))
}
}
/// A Builder consumes a json::Parser to create a generic Json structure.
pub struct Builder<T> {
// Streaming parser yielding `JsonEvent`s from the character source.
parser: Parser<T>,
// One-event lookahead: the most recently pulled event, if any.
token: Option<JsonEvent>,
}
impl<T: Iterator<Item = char>> Builder<T> {
/// Creates a JSON Builder.
pub fn new(src: T) -> Builder<T> {
Builder { parser: Parser::new(src), token: None }
}
// Decode a Json value from a Parser.
pub fn build(&mut self) -> Result<Json, BuilderError> {
self.bump();
let result = self.build_value();
self.bump();
match self.token {
None => {}
Some(Error(ref e)) => {
return Err(e.clone());
}
ref tok => {
panic!("unexpected token {:?}", tok.clone());
}
}
result
}
fn bump(&mut self) {
self.token = self.parser.next();
}
fn build_value(&mut self) -> Result<Json, BuilderError> {
match self.token {
Some(NullValue) => Ok(Json::Null),
Some(I64Value(n)) => Ok(Json::I64(n)),
Some(U64Value(n)) => Ok(Json::U64(n)),
Some(F64Value(n)) => Ok(Json::F64(n)),
Some(BooleanValue(b)) => Ok(Json::Boolean(b)),
Some(StringValue(ref mut s)) => {
let mut temp = string::String::new();
swap(s, &mut temp);
Ok(Json::String(temp))
}
Some(Error(ref e)) => Err(e.clone()),
Some(ArrayStart) => self.build_array(),
Some(ObjectStart) => self.build_object(),
Some(ObjectEnd) => self.parser.error(InvalidSyntax),
Some(ArrayEnd) => self.parser.error(InvalidSyntax),
None => self.parser.error(EOFWhileParsingValue),
}
}
fn build_array(&mut self) -> Result<Json, BuilderError> {
self.bump();
let mut values = Vec::new();
loop {
if self.token == Some(ArrayEnd) {
return Ok(Json::Array(values.into_iter().collect()));
}
match self.build_value() {
Ok(v) => values.push(v),
Err(e) => return Err(e),
}
self.bump();
}
}
fn build_object(&mut self) -> Result<Json, BuilderError> {
self.bump();
let mut values = BTreeMap::new();
loop {
match self.token {
Some(ObjectEnd) => {
return Ok(Json::Object(values));
}
Some(Error(ref e)) => {
return Err(e.clone());
}
None => {
break;
}
_ => {}
}
let key = match self.parser.stack().top() {
Some(StackElement::Key(k)) => k.to_owned(),
_ => {
panic!("invalid state");
}
};
match self.build_value() {
Ok(value) => {
values.insert(key, value);
}
Err(e) => {
return Err(e);
}
}
self.bump();
}
self.parser.error(EOFWhileParsingObject)
}
}
/// Decodes a json value from a string.
pub fn from_str(s: &str) -> Result<Json, BuilderError> {
    Builder::new(s.chars()).build()
}
/// A trait for converting values to JSON.
pub trait ToJson {
/// Converts the value of `self` to an instance of JSON.
fn to_json(&self) -> Json;
}
// Generates `ToJson` impls for signed integer types, widening to `i64`.
macro_rules! to_json_impl_i64 {
    ($($int_ty:ty),+) => {
        $(
            impl ToJson for $int_ty {
                /// Converts the integer to a `Json::I64` value.
                fn to_json(&self) -> Json {
                    Json::I64(*self as i64)
                }
            }
        )+
    };
}
to_json_impl_i64! { isize, i8, i16, i32, i64 }
// Generates `ToJson` impls for unsigned integer types, widening to `u64`.
macro_rules! to_json_impl_u64 {
    ($($int_ty:ty),+) => {
        $(
            impl ToJson for $int_ty {
                /// Converts the unsigned integer to a `Json::U64` value.
                fn to_json(&self) -> Json {
                    Json::U64(*self as u64)
                }
            }
        )+
    };
}
to_json_impl_u64! { usize, u8, u16, u32, u64 }
impl ToJson for Json {
/// Returns a deep copy of this JSON value.
fn to_json(&self) -> Json {
self.clone()
}
}
impl ToJson for f32 {
    /// Widens to `f64` (lossless) and defers to that implementation, so
    /// non-finite values become `Json::Null`.
    fn to_json(&self) -> Json {
        (*self as f64).to_json()
    }
}
impl ToJson for f64 {
    /// Finite floats map to `Json::F64`; NaN and the infinities have no
    /// JSON representation and map to `Json::Null`.
    fn to_json(&self) -> Json {
        if self.is_finite() { Json::F64(*self) } else { Json::Null }
    }
}
impl ToJson for () {
/// The unit value has no content; it maps to `Json::Null`.
fn to_json(&self) -> Json {
Json::Null
}
}
impl ToJson for bool {
/// Maps directly onto a JSON boolean.
fn to_json(&self) -> Json {
Json::Boolean(*self)
}
}
impl ToJson for str {
    /// Copies the string slice into an owned `Json::String`.
    fn to_json(&self) -> Json {
        Json::String(self.to_owned())
    }
}
impl ToJson for string::String {
    /// Clones the owned string into a `Json::String`.
    fn to_json(&self) -> Json {
        Json::String(self.clone())
    }
}
impl<'a> ToJson for Cow<'a, str> {
    /// Copies the (borrowed or owned) string data into a `Json::String`.
    fn to_json(&self) -> Json {
        Json::String((**self).to_owned())
    }
}
// Generates `ToJson` for a tuple: each component is converted and the
// results are collected into a `Json::Array`.
macro_rules! tuple_impl {
// use variables to indicate the arity of the tuple
($($tyvar:ident),* ) => {
// the trailing commas are for the 1 tuple
impl<
$( $tyvar : ToJson ),*
> ToJson for ( $( $tyvar ),* , ) {
#[inline]
#[allow(non_snake_case)]
fn to_json(&self) -> Json {
// Destructure by reference, then convert each component in order.
match *self {
($(ref $tyvar),*,) => Json::Array(vec![$($tyvar.to_json()),*])
}
}
}
}
}
// Implement `ToJson` for tuples of arity 1 through 12.
tuple_impl! {A}
tuple_impl! {A, B}
tuple_impl! {A, B, C}
tuple_impl! {A, B, C, D}
tuple_impl! {A, B, C, D, E}
tuple_impl! {A, B, C, D, E, F}
tuple_impl! {A, B, C, D, E, F, G}
tuple_impl! {A, B, C, D, E, F, G, H}
tuple_impl! {A, B, C, D, E, F, G, H, I}
tuple_impl! {A, B, C, D, E, F, G, H, I, J}
tuple_impl! {A, B, C, D, E, F, G, H, I, J, K}
tuple_impl! {A, B, C, D, E, F, G, H, I, J, K, L}
impl<A: ToJson> ToJson for [A] {
    /// Converts each element in order, collecting into a `Json::Array`.
    fn to_json(&self) -> Json {
        let mut elems = Vec::with_capacity(self.len());
        for elt in self {
            elems.push(elt.to_json());
        }
        Json::Array(elems)
    }
}
impl<A: ToJson> ToJson for Vec<A> {
    /// Converts the vector by delegating to the slice implementation.
    fn to_json(&self) -> Json {
        self[..].to_json()
    }
}
impl<'a, A: ToJson> ToJson for Cow<'a, [A]>
where
    [A]: ToOwned,
{
    /// Converts each element of the (borrowed or owned) slice in order.
    fn to_json(&self) -> Json {
        let mut elems = Vec::new();
        for elt in self.iter() {
            elems.push(elt.to_json());
        }
        Json::Array(elems)
    }
}
impl<T: ToString, A: ToJson> ToJson for BTreeMap<T, A> {
    /// Builds a `Json::Object`, stringifying each key via `ToString`.
    fn to_json(&self) -> Json {
        let entries = self.iter().map(|(key, value)| (key.to_string(), value.to_json()));
        Json::Object(entries.collect())
    }
}
impl<A: ToJson> ToJson for HashMap<string::String, A> {
    /// Re-keys the hash map into the ordered map JSON objects use.
    fn to_json(&self) -> Json {
        let entries = self.iter().map(|(key, value)| (key.clone(), value.to_json()));
        Json::Object(entries.collect())
    }
}
impl<A: ToJson> ToJson for Option<A> {
    /// `None` becomes `Json::Null`; `Some` converts the inner value.
    fn to_json(&self) -> Json {
        match self.as_ref() {
            Some(value) => value.to_json(),
            None => Json::Null,
        }
    }
}
// Adapter that lets the JSON encoders (which write to `fmt::Write`) emit
// directly into a `fmt::Formatter` from a `Display` impl.
struct FormatShim<'a, 'b> {
inner: &'a mut fmt::Formatter<'b>,
}
impl<'a, 'b> fmt::Write for FormatShim<'a, 'b> {
    /// Forwards the string to the wrapped formatter, normalizing any
    /// failure to `fmt::Error`.
    fn write_str(&mut self, s: &str) -> fmt::Result {
        self.inner.write_str(s).map_err(|_| fmt::Error)
    }
}
impl fmt::Display for Json {
    /// Encodes the value as compact (single-line) JSON text.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut shim = FormatShim { inner: f };
        let mut encoder = Encoder::new(&mut shim);
        // Encoder errors carry no useful payload here; map to `fmt::Error`.
        self.encode(&mut encoder).map_err(|_| fmt::Error)
    }
}
impl<'a> fmt::Display for PrettyJson<'a> {
    /// Encodes the wrapped value as indented (pretty) JSON text.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut shim = FormatShim { inner: f };
        let mut encoder = PrettyEncoder::new(&mut shim);
        self.inner.encode(&mut encoder).map_err(|_| fmt::Error)
    }
}
impl<'a, T: for<'r> Encodable<Encoder<'r>>> fmt::Display for AsJson<'a, T> {
    /// Encodes any `Encodable` value as compact JSON text.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut shim = FormatShim { inner: f };
        let mut encoder = Encoder::new(&mut shim);
        self.inner.encode(&mut encoder).map_err(|_| fmt::Error)
    }
}
impl<'a, T> AsPrettyJson<'a, T> {
/// Sets the indentation level for the emitted JSON.
pub fn indent(mut self, indent: usize) -> AsPrettyJson<'a, T> {
// Builder-style setter: consumes and returns `self` so calls can chain.
self.indent = Some(indent);
self
}
}
impl<'a, T: for<'x> Encodable<PrettyEncoder<'x>>> fmt::Display for AsPrettyJson<'a, T> {
    /// Encodes any `Encodable` value as indented (pretty) JSON text.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut shim = FormatShim { inner: f };
        let mut encoder = PrettyEncoder::new(&mut shim);
        // Apply the caller-requested indent width, if one was configured.
        if let Some(n) = self.indent {
            encoder.set_indent(n);
        }
        self.inner.encode(&mut encoder).map_err(|_| fmt::Error)
    }
}
impl FromStr for Json {
type Err = BuilderError;
/// Parses a string as JSON, enabling `"...".parse::<Json>()`.
fn from_str(s: &str) -> Result<Json, BuilderError> {
// Delegates to the module-level `from_str` helper.
from_str(s)
}
}
#[cfg(test)]
mod tests;
-147
View File
@@ -1,147 +0,0 @@
// Benchmarks and tests that require private items
extern crate test;
use super::{from_str, Parser, Stack, StackElement};
use std::string;
use test::Bencher;
#[test]
fn test_stack() {
let mut stack = Stack::new();
assert!(stack.is_empty());
assert!(stack.is_empty());
assert!(!stack.last_is_index());
stack.push_index(0);
stack.bump_index();
assert!(stack.len() == 1);
assert!(stack.is_equal_to(&[StackElement::Index(1)]));
assert!(stack.starts_with(&[StackElement::Index(1)]));
assert!(stack.ends_with(&[StackElement::Index(1)]));
assert!(stack.last_is_index());
assert!(stack.get(0) == StackElement::Index(1));
stack.push_key("foo".to_string());
assert!(stack.len() == 2);
assert!(stack.is_equal_to(&[StackElement::Index(1), StackElement::Key("foo")]));
assert!(stack.starts_with(&[StackElement::Index(1), StackElement::Key("foo")]));
assert!(stack.starts_with(&[StackElement::Index(1)]));
assert!(stack.ends_with(&[StackElement::Index(1), StackElement::Key("foo")]));
assert!(stack.ends_with(&[StackElement::Key("foo")]));
assert!(!stack.last_is_index());
assert!(stack.get(0) == StackElement::Index(1));
assert!(stack.get(1) == StackElement::Key("foo"));
stack.push_key("bar".to_string());
assert!(stack.len() == 3);
assert!(stack.is_equal_to(&[
StackElement::Index(1),
StackElement::Key("foo"),
StackElement::Key("bar")
]));
assert!(stack.starts_with(&[StackElement::Index(1)]));
assert!(stack.starts_with(&[StackElement::Index(1), StackElement::Key("foo")]));
assert!(stack.starts_with(&[
StackElement::Index(1),
StackElement::Key("foo"),
StackElement::Key("bar")
]));
assert!(stack.ends_with(&[StackElement::Key("bar")]));
assert!(stack.ends_with(&[StackElement::Key("foo"), StackElement::Key("bar")]));
assert!(stack.ends_with(&[
StackElement::Index(1),
StackElement::Key("foo"),
StackElement::Key("bar")
]));
assert!(!stack.last_is_index());
assert!(stack.get(0) == StackElement::Index(1));
assert!(stack.get(1) == StackElement::Key("foo"));
assert!(stack.get(2) == StackElement::Key("bar"));
stack.pop();
assert!(stack.len() == 2);
assert!(stack.is_equal_to(&[StackElement::Index(1), StackElement::Key("foo")]));
assert!(stack.starts_with(&[StackElement::Index(1), StackElement::Key("foo")]));
assert!(stack.starts_with(&[StackElement::Index(1)]));
assert!(stack.ends_with(&[StackElement::Index(1), StackElement::Key("foo")]));
assert!(stack.ends_with(&[StackElement::Key("foo")]));
assert!(!stack.last_is_index());
assert!(stack.get(0) == StackElement::Index(1));
assert!(stack.get(1) == StackElement::Key("foo"));
}
#[bench]
fn bench_streaming_small(b: &mut Bencher) {
b.iter(|| {
let mut parser = Parser::new(
r#"{
"a": 1.0,
"b": [
true,
"foo\nbar",
{ "c": {"d": null} }
]
}"#
.chars(),
);
loop {
match parser.next() {
None => return,
_ => {}
}
}
});
}
#[bench]
fn bench_small(b: &mut Bencher) {
b.iter(|| {
let _ = from_str(
r#"{
"a": 1.0,
"b": [
true,
"foo\nbar",
{ "c": {"d": null} }
]
}"#,
);
});
}
fn big_json() -> string::String {
let mut src = "[\n".to_string();
for _ in 0..500 {
src.push_str(
r#"{ "a": true, "b": null, "c":3.1415, "d": "Hello world", "e": \
[1,2,3]},"#,
);
}
src.push_str("{}]");
return src;
}
#[bench]
fn bench_streaming_large(b: &mut Bencher) {
let src = big_json();
b.iter(|| {
let mut parser = Parser::new(src.chars());
loop {
match parser.next() {
None => return,
_ => {}
}
}
});
}
#[bench]
fn bench_large(b: &mut Bencher) {
let src = big_json();
b.iter(|| {
let _ = from_str(&src);
});
}
-2
View File
@@ -25,7 +25,5 @@
mod collection_impls;
mod serialize;
pub mod json;
pub mod leb128;
pub mod opaque;
-10
View File
@@ -64,11 +64,6 @@ macro_rules! write_leb128 {
impl serialize::Encoder for Encoder {
type Error = !;
#[inline]
fn emit_unit(&mut self) -> EncodeResult {
Ok(())
}
#[inline]
fn emit_usize(&mut self, v: usize) -> EncodeResult {
write_leb128!(self, v, usize, write_usize_leb128)
@@ -419,11 +414,6 @@ macro_rules! file_encoder_write_leb128 {
impl serialize::Encoder for FileEncoder {
type Error = io::Error;
#[inline]
fn emit_unit(&mut self) -> FileEncodeResult {
Ok(())
}
#[inline]
fn emit_usize(&mut self, v: usize) -> FileEncodeResult {
file_encoder_write_leb128!(self, v, usize, write_usize_leb128)
+22 -161
View File
@@ -15,7 +15,6 @@ pub trait Encoder {
type Error;
// Primitive types:
fn emit_unit(&mut self) -> Result<(), Self::Error>;
fn emit_usize(&mut self, v: usize) -> Result<(), Self::Error>;
fn emit_u128(&mut self, v: u128) -> Result<(), Self::Error>;
fn emit_u64(&mut self, v: u64) -> Result<(), Self::Error>;
@@ -35,22 +34,8 @@ pub trait Encoder {
fn emit_str(&mut self, v: &str) -> Result<(), Self::Error>;
fn emit_raw_bytes(&mut self, s: &[u8]) -> Result<(), Self::Error>;
// Compound types:
#[inline]
fn emit_enum<F>(&mut self, f: F) -> Result<(), Self::Error>
where
F: FnOnce(&mut Self) -> Result<(), Self::Error>,
{
f(self)
}
fn emit_enum_variant<F>(
&mut self,
_v_name: &str,
v_id: usize,
_len: usize,
f: F,
) -> Result<(), Self::Error>
// Convenience for the derive macro:
fn emit_enum_variant<F>(&mut self, v_id: usize, f: F) -> Result<(), Self::Error>
where
F: FnOnce(&mut Self) -> Result<(), Self::Error>,
{
@@ -65,112 +50,9 @@ fn emit_enum_variant<F>(
// optimization that would otherwise be necessary here, likely due to the
// multiple levels of inlining and const-prop that are needed.
#[inline]
fn emit_fieldless_enum_variant<const ID: usize>(
&mut self,
_v_name: &str,
) -> Result<(), Self::Error> {
fn emit_fieldless_enum_variant<const ID: usize>(&mut self) -> Result<(), Self::Error> {
self.emit_usize(ID)
}
#[inline]
fn emit_enum_variant_arg<F>(&mut self, _first: bool, f: F) -> Result<(), Self::Error>
where
F: FnOnce(&mut Self) -> Result<(), Self::Error>,
{
f(self)
}
#[inline]
fn emit_struct<F>(&mut self, _no_fields: bool, f: F) -> Result<(), Self::Error>
where
F: FnOnce(&mut Self) -> Result<(), Self::Error>,
{
f(self)
}
#[inline]
fn emit_struct_field<F>(&mut self, _f_name: &str, _first: bool, f: F) -> Result<(), Self::Error>
where
F: FnOnce(&mut Self) -> Result<(), Self::Error>,
{
f(self)
}
#[inline]
fn emit_tuple<F>(&mut self, _len: usize, f: F) -> Result<(), Self::Error>
where
F: FnOnce(&mut Self) -> Result<(), Self::Error>,
{
f(self)
}
#[inline]
fn emit_tuple_arg<F>(&mut self, _idx: usize, f: F) -> Result<(), Self::Error>
where
F: FnOnce(&mut Self) -> Result<(), Self::Error>,
{
f(self)
}
// Specialized types:
fn emit_option<F>(&mut self, f: F) -> Result<(), Self::Error>
where
F: FnOnce(&mut Self) -> Result<(), Self::Error>,
{
self.emit_enum(f)
}
#[inline]
fn emit_option_none(&mut self) -> Result<(), Self::Error> {
self.emit_enum_variant("None", 0, 0, |_| Ok(()))
}
fn emit_option_some<F>(&mut self, f: F) -> Result<(), Self::Error>
where
F: FnOnce(&mut Self) -> Result<(), Self::Error>,
{
self.emit_enum_variant("Some", 1, 1, f)
}
fn emit_seq<F>(&mut self, len: usize, f: F) -> Result<(), Self::Error>
where
F: FnOnce(&mut Self) -> Result<(), Self::Error>,
{
self.emit_usize(len)?;
f(self)
}
#[inline]
fn emit_seq_elt<F>(&mut self, _idx: usize, f: F) -> Result<(), Self::Error>
where
F: FnOnce(&mut Self) -> Result<(), Self::Error>,
{
f(self)
}
fn emit_map<F>(&mut self, len: usize, f: F) -> Result<(), Self::Error>
where
F: FnOnce(&mut Self) -> Result<(), Self::Error>,
{
self.emit_usize(len)?;
f(self)
}
#[inline]
fn emit_map_elt_key<F>(&mut self, _idx: usize, f: F) -> Result<(), Self::Error>
where
F: FnOnce(&mut Self) -> Result<(), Self::Error>,
{
f(self)
}
#[inline]
fn emit_map_elt_val<F>(&mut self, f: F) -> Result<(), Self::Error>
where
F: FnOnce(&mut Self) -> Result<(), Self::Error>,
{
f(self)
}
}
// Note: all the methods in this trait are infallible, which may be surprising.
@@ -320,8 +202,8 @@ fn decode(d: &mut D) -> String {
}
impl<S: Encoder> Encodable<S> for () {
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_unit()
fn encode(&self, _s: &mut S) -> Result<(), S::Error> {
Ok(())
}
}
@@ -330,8 +212,8 @@ fn decode(_: &mut D) -> () {}
}
impl<S: Encoder, T> Encodable<S> for PhantomData<T> {
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_unit()
fn encode(&self, _s: &mut S) -> Result<(), S::Error> {
Ok(())
}
}
@@ -362,12 +244,11 @@ fn decode(d: &mut D) -> Rc<T> {
impl<S: Encoder, T: Encodable<S>> Encodable<S> for [T] {
default fn encode(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_seq(self.len(), |s| {
for (i, e) in self.iter().enumerate() {
s.emit_seq_elt(i, |s| e.encode(s))?
}
Ok(())
})
s.emit_usize(self.len())?;
for e in self.iter() {
e.encode(s)?
}
Ok(())
}
}
@@ -450,10 +331,10 @@ fn decode(d: &mut D) -> Cow<'static, str> {
impl<S: Encoder, T: Encodable<S>> Encodable<S> for Option<T> {
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_option(|s| match *self {
None => s.emit_option_none(),
Some(ref v) => s.emit_option_some(|s| v.encode(s)),
})
match *self {
None => s.emit_enum_variant(0, |_| Ok(())),
Some(ref v) => s.emit_enum_variant(1, |s| v.encode(s)),
}
}
}
@@ -469,14 +350,10 @@ fn decode(d: &mut D) -> Option<T> {
impl<S: Encoder, T1: Encodable<S>, T2: Encodable<S>> Encodable<S> for Result<T1, T2> {
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_enum(|s| match *self {
Ok(ref v) => {
s.emit_enum_variant("Ok", 0, 1, |s| s.emit_enum_variant_arg(true, |s| v.encode(s)))
}
Err(ref v) => {
s.emit_enum_variant("Err", 1, 1, |s| s.emit_enum_variant_arg(true, |s| v.encode(s)))
}
})
match *self {
Ok(ref v) => s.emit_enum_variant(0, |s| v.encode(s)),
Err(ref v) => s.emit_enum_variant(1, |s| v.encode(s)),
}
}
}
@@ -494,18 +371,6 @@ macro_rules! peel {
($name:ident, $($other:ident,)*) => (tuple! { $($other,)* })
}
/// Evaluates to the number of tokens passed to it.
///
/// Logarithmic counting: every one or two recursive expansions, the number of
/// tokens to count is divided by two, instead of being reduced by one.
/// Therefore, the recursion depth is the binary logarithm of the number of
/// tokens to count, and the expanded tree is likewise very small.
macro_rules! count {
($one:tt) => (1usize);
($($pairs:tt $_p:tt)*) => (count!($($pairs)*) << 1usize);
($odd:tt $($rest:tt)*) => (count!($($rest)*) | 1usize);
}
macro_rules! tuple {
() => ();
( $($name:ident,)+ ) => (
@@ -518,12 +383,8 @@ impl<S: Encoder, $($name: Encodable<S>),+> Encodable<S> for ($($name,)+) {
#[allow(non_snake_case)]
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
let ($(ref $name,)+) = *self;
let len: usize = count!($($name)+);
s.emit_tuple(len, |s| {
let mut i = 0;
$(s.emit_tuple_arg({ i+=1; i-1 }, |s| $name.encode(s))?;)+
Ok(())
})
$($name.encode(s)?;)+
Ok(())
}
}
peel! { $($name,)+ }
-978
View File
@@ -1,978 +0,0 @@
#![allow(rustc::internal)]
use json::ErrorCode::*;
use json::Json::*;
use json::JsonEvent::*;
use json::ParserError::*;
use json::{from_str, Encoder, EncoderError, Json, JsonEvent, Parser, StackElement};
use rustc_macros::Encodable;
use rustc_serialize::json;
use rustc_serialize::Encodable;
use std::collections::BTreeMap;
use std::io::prelude::*;
use std::string;
use Animal::*;
#[derive(Eq, PartialEq, Debug)]
struct OptionData {
opt: Option<usize>,
}
#[derive(PartialEq, Encodable, Debug)]
enum Animal {
Dog,
Frog(string::String, isize),
}
#[derive(PartialEq, Encodable, Debug)]
struct Inner {
a: (),
b: usize,
c: Vec<string::String>,
}
#[derive(PartialEq, Encodable, Debug)]
struct Outer {
inner: Vec<Inner>,
}
fn mk_object(items: &[(string::String, Json)]) -> Json {
let mut d = BTreeMap::new();
for item in items {
match *item {
(ref key, ref value) => {
d.insert((*key).clone(), (*value).clone());
}
}
}
Object(d)
}
#[test]
fn test_from_str_trait() {
let s = "null";
assert!(s.parse::<Json>().unwrap() == s.parse().unwrap());
}
#[test]
fn test_write_null() {
assert_eq!(Null.to_string(), "null");
assert_eq!(Null.pretty().to_string(), "null");
}
#[test]
fn test_write_i64() {
assert_eq!(U64(0).to_string(), "0");
assert_eq!(U64(0).pretty().to_string(), "0");
assert_eq!(U64(1234).to_string(), "1234");
assert_eq!(U64(1234).pretty().to_string(), "1234");
assert_eq!(I64(-5678).to_string(), "-5678");
assert_eq!(I64(-5678).pretty().to_string(), "-5678");
assert_eq!(U64(7650007200025252000).to_string(), "7650007200025252000");
assert_eq!(U64(7650007200025252000).pretty().to_string(), "7650007200025252000");
}
#[test]
fn test_write_f64() {
assert_eq!(F64(3.0).to_string(), "3.0");
assert_eq!(F64(3.0).pretty().to_string(), "3.0");
assert_eq!(F64(3.1).to_string(), "3.1");
assert_eq!(F64(3.1).pretty().to_string(), "3.1");
assert_eq!(F64(-1.5).to_string(), "-1.5");
assert_eq!(F64(-1.5).pretty().to_string(), "-1.5");
assert_eq!(F64(0.5).to_string(), "0.5");
assert_eq!(F64(0.5).pretty().to_string(), "0.5");
assert_eq!(F64(f64::NAN).to_string(), "null");
assert_eq!(F64(f64::NAN).pretty().to_string(), "null");
assert_eq!(F64(f64::INFINITY).to_string(), "null");
assert_eq!(F64(f64::INFINITY).pretty().to_string(), "null");
assert_eq!(F64(f64::NEG_INFINITY).to_string(), "null");
assert_eq!(F64(f64::NEG_INFINITY).pretty().to_string(), "null");
}
#[test]
fn test_write_str() {
assert_eq!(String("".to_string()).to_string(), "\"\"");
assert_eq!(String("".to_string()).pretty().to_string(), "\"\"");
assert_eq!(String("homura".to_string()).to_string(), "\"homura\"");
assert_eq!(String("madoka".to_string()).pretty().to_string(), "\"madoka\"");
}
#[test]
fn test_write_bool() {
assert_eq!(Boolean(true).to_string(), "true");
assert_eq!(Boolean(true).pretty().to_string(), "true");
assert_eq!(Boolean(false).to_string(), "false");
assert_eq!(Boolean(false).pretty().to_string(), "false");
}
#[test]
fn test_write_array() {
assert_eq!(Array(vec![]).to_string(), "[]");
assert_eq!(Array(vec![]).pretty().to_string(), "[]");
assert_eq!(Array(vec![Boolean(true)]).to_string(), "[true]");
assert_eq!(
Array(vec![Boolean(true)]).pretty().to_string(),
"\
[\n \
true\n\
]"
);
let long_test_array =
Array(vec![Boolean(false), Null, Array(vec![String("foo\nbar".to_string()), F64(3.5)])]);
assert_eq!(long_test_array.to_string(), "[false,null,[\"foo\\nbar\",3.5]]");
assert_eq!(
long_test_array.pretty().to_string(),
"\
[\n \
false,\n \
null,\n \
[\n \
\"foo\\nbar\",\n \
3.5\n \
]\n\
]"
);
}
#[test]
fn test_write_object() {
assert_eq!(mk_object(&[]).to_string(), "{}");
assert_eq!(mk_object(&[]).pretty().to_string(), "{}");
assert_eq!(mk_object(&[("a".to_string(), Boolean(true))]).to_string(), "{\"a\":true}");
assert_eq!(
mk_object(&[("a".to_string(), Boolean(true))]).pretty().to_string(),
"\
{\n \
\"a\": true\n\
}"
);
let complex_obj = mk_object(&[(
"b".to_string(),
Array(vec![
mk_object(&[("c".to_string(), String("\x0c\r".to_string()))]),
mk_object(&[("d".to_string(), String("".to_string()))]),
]),
)]);
assert_eq!(
complex_obj.to_string(),
"{\
\"b\":[\
{\"c\":\"\\f\\r\"},\
{\"d\":\"\"}\
]\
}"
);
assert_eq!(
complex_obj.pretty().to_string(),
"\
{\n \
\"b\": [\n \
{\n \
\"c\": \"\\f\\r\"\n \
},\n \
{\n \
\"d\": \"\"\n \
}\n \
]\n\
}"
);
let a = mk_object(&[
("a".to_string(), Boolean(true)),
(
"b".to_string(),
Array(vec![
mk_object(&[("c".to_string(), String("\x0c\r".to_string()))]),
mk_object(&[("d".to_string(), String("".to_string()))]),
]),
),
]);
// We can't compare the strings directly because the object fields be
// printed in a different order.
assert_eq!(a.clone(), a.to_string().parse().unwrap());
assert_eq!(a.clone(), a.pretty().to_string().parse().unwrap());
}
#[test]
fn test_write_enum() {
let animal = Dog;
assert_eq!(json::as_json(&animal).to_string(), "\"Dog\"");
assert_eq!(json::as_pretty_json(&animal).to_string(), "\"Dog\"");
let animal = Frog("Henry".to_string(), 349);
assert_eq!(
json::as_json(&animal).to_string(),
"{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}"
);
assert_eq!(
json::as_pretty_json(&animal).to_string(),
"{\n \
\"variant\": \"Frog\",\n \
\"fields\": [\n \
\"Henry\",\n \
349\n \
]\n\
}"
);
}
macro_rules! check_encoder_for_simple {
($value:expr, $expected:expr) => {{
let s = json::as_json(&$value).to_string();
assert_eq!(s, $expected);
let s = json::as_pretty_json(&$value).to_string();
assert_eq!(s, $expected);
}};
}
#[test]
fn test_write_some() {
check_encoder_for_simple!(Some("jodhpurs".to_string()), "\"jodhpurs\"");
}
#[test]
fn test_write_none() {
check_encoder_for_simple!(None::<string::String>, "null");
}
#[test]
fn test_write_char() {
check_encoder_for_simple!('a', "\"a\"");
check_encoder_for_simple!('\t', "\"\\t\"");
check_encoder_for_simple!('\u{0000}', "\"\\u0000\"");
check_encoder_for_simple!('\u{001b}', "\"\\u001b\"");
check_encoder_for_simple!('\u{007f}', "\"\\u007f\"");
check_encoder_for_simple!('\u{00a0}', "\"\u{00a0}\"");
check_encoder_for_simple!('\u{abcd}', "\"\u{abcd}\"");
check_encoder_for_simple!('\u{10ffff}', "\"\u{10ffff}\"");
}
#[test]
fn test_trailing_characters() {
assert_eq!(from_str("nulla"), Err(SyntaxError(TrailingCharacters, 1, 5)));
assert_eq!(from_str("truea"), Err(SyntaxError(TrailingCharacters, 1, 5)));
assert_eq!(from_str("falsea"), Err(SyntaxError(TrailingCharacters, 1, 6)));
assert_eq!(from_str("1a"), Err(SyntaxError(TrailingCharacters, 1, 2)));
assert_eq!(from_str("[]a"), Err(SyntaxError(TrailingCharacters, 1, 3)));
assert_eq!(from_str("{}a"), Err(SyntaxError(TrailingCharacters, 1, 3)));
}
#[test]
fn test_read_identifiers() {
assert_eq!(from_str("n"), Err(SyntaxError(InvalidSyntax, 1, 2)));
assert_eq!(from_str("nul"), Err(SyntaxError(InvalidSyntax, 1, 4)));
assert_eq!(from_str("t"), Err(SyntaxError(InvalidSyntax, 1, 2)));
assert_eq!(from_str("truz"), Err(SyntaxError(InvalidSyntax, 1, 4)));
assert_eq!(from_str("f"), Err(SyntaxError(InvalidSyntax, 1, 2)));
assert_eq!(from_str("faz"), Err(SyntaxError(InvalidSyntax, 1, 3)));
assert_eq!(from_str("null"), Ok(Null));
assert_eq!(from_str("true"), Ok(Boolean(true)));
assert_eq!(from_str("false"), Ok(Boolean(false)));
assert_eq!(from_str(" null "), Ok(Null));
assert_eq!(from_str(" true "), Ok(Boolean(true)));
assert_eq!(from_str(" false "), Ok(Boolean(false)));
}
#[test]
fn test_read_number() {
assert_eq!(from_str("+"), Err(SyntaxError(InvalidSyntax, 1, 1)));
assert_eq!(from_str("."), Err(SyntaxError(InvalidSyntax, 1, 1)));
assert_eq!(from_str("NaN"), Err(SyntaxError(InvalidSyntax, 1, 1)));
assert_eq!(from_str("-"), Err(SyntaxError(InvalidNumber, 1, 2)));
assert_eq!(from_str("00"), Err(SyntaxError(InvalidNumber, 1, 2)));
assert_eq!(from_str("1."), Err(SyntaxError(InvalidNumber, 1, 3)));
assert_eq!(from_str("1e"), Err(SyntaxError(InvalidNumber, 1, 3)));
assert_eq!(from_str("1e+"), Err(SyntaxError(InvalidNumber, 1, 4)));
assert_eq!(from_str("18446744073709551616"), Err(SyntaxError(InvalidNumber, 1, 20)));
assert_eq!(from_str("-9223372036854775809"), Err(SyntaxError(InvalidNumber, 1, 21)));
assert_eq!(from_str("3"), Ok(U64(3)));
assert_eq!(from_str("3.1"), Ok(F64(3.1)));
assert_eq!(from_str("-1.2"), Ok(F64(-1.2)));
assert_eq!(from_str("0.4"), Ok(F64(0.4)));
assert_eq!(from_str("0.4e5"), Ok(F64(0.4e5)));
assert_eq!(from_str("0.4e+15"), Ok(F64(0.4e15)));
assert_eq!(from_str("0.4e-01"), Ok(F64(0.4e-01)));
assert_eq!(from_str(" 3 "), Ok(U64(3)));
assert_eq!(from_str("-9223372036854775808"), Ok(I64(i64::MIN)));
assert_eq!(from_str("9223372036854775807"), Ok(U64(i64::MAX as u64)));
assert_eq!(from_str("18446744073709551615"), Ok(U64(u64::MAX)));
}
#[test]
fn test_read_str() {
assert_eq!(from_str("\""), Err(SyntaxError(EOFWhileParsingString, 1, 2)));
assert_eq!(from_str("\"lol"), Err(SyntaxError(EOFWhileParsingString, 1, 5)));
assert_eq!(from_str("\"\""), Ok(String("".to_string())));
assert_eq!(from_str("\"foo\""), Ok(String("foo".to_string())));
assert_eq!(from_str("\"\\\"\""), Ok(String("\"".to_string())));
assert_eq!(from_str("\"\\b\""), Ok(String("\x08".to_string())));
assert_eq!(from_str("\"\\n\""), Ok(String("\n".to_string())));
assert_eq!(from_str("\"\\r\""), Ok(String("\r".to_string())));
assert_eq!(from_str("\"\\t\""), Ok(String("\t".to_string())));
assert_eq!(from_str(" \"foo\" "), Ok(String("foo".to_string())));
assert_eq!(from_str("\"\\u12ab\""), Ok(String("\u{12ab}".to_string())));
assert_eq!(from_str("\"\\uAB12\""), Ok(String("\u{AB12}".to_string())));
}
#[test]
fn test_read_array() {
assert_eq!(from_str("["), Err(SyntaxError(EOFWhileParsingValue, 1, 2)));
assert_eq!(from_str("[1"), Err(SyntaxError(EOFWhileParsingArray, 1, 3)));
assert_eq!(from_str("[1,"), Err(SyntaxError(EOFWhileParsingValue, 1, 4)));
assert_eq!(from_str("[1,]"), Err(SyntaxError(InvalidSyntax, 1, 4)));
assert_eq!(from_str("[6 7]"), Err(SyntaxError(InvalidSyntax, 1, 4)));
assert_eq!(from_str("[]"), Ok(Array(vec![])));
assert_eq!(from_str("[ ]"), Ok(Array(vec![])));
assert_eq!(from_str("[true]"), Ok(Array(vec![Boolean(true)])));
assert_eq!(from_str("[ false ]"), Ok(Array(vec![Boolean(false)])));
assert_eq!(from_str("[null]"), Ok(Array(vec![Null])));
assert_eq!(from_str("[3, 1]"), Ok(Array(vec![U64(3), U64(1)])));
assert_eq!(from_str("\n[3, 2]\n"), Ok(Array(vec![U64(3), U64(2)])));
assert_eq!(from_str("[2, [4, 1]]"), Ok(Array(vec![U64(2), Array(vec![U64(4), U64(1)])])));
}
// Object parsing: each truncation/malformation point (missing key, missing
// colon, missing value, dangling comma) reports a distinct positioned error;
// well-formed objects — including nested objects and arrays as values —
// compare equal to `mk_object`-built expectations.
#[test]
fn test_read_object() {
    assert_eq!(from_str("{"), Err(SyntaxError(EOFWhileParsingObject, 1, 2)));
    assert_eq!(from_str("{ "), Err(SyntaxError(EOFWhileParsingObject, 1, 3)));
    assert_eq!(from_str("{1"), Err(SyntaxError(KeyMustBeAString, 1, 2)));
    assert_eq!(from_str("{ \"a\""), Err(SyntaxError(EOFWhileParsingObject, 1, 6)));
    assert_eq!(from_str("{\"a\""), Err(SyntaxError(EOFWhileParsingObject, 1, 5)));
    assert_eq!(from_str("{\"a\" "), Err(SyntaxError(EOFWhileParsingObject, 1, 6)));
    assert_eq!(from_str("{\"a\" 1"), Err(SyntaxError(ExpectedColon, 1, 6)));
    assert_eq!(from_str("{\"a\":"), Err(SyntaxError(EOFWhileParsingValue, 1, 6)));
    assert_eq!(from_str("{\"a\":1"), Err(SyntaxError(EOFWhileParsingObject, 1, 7)));
    assert_eq!(from_str("{\"a\":1 1"), Err(SyntaxError(InvalidSyntax, 1, 8)));
    assert_eq!(from_str("{\"a\":1,"), Err(SyntaxError(EOFWhileParsingObject, 1, 8)));
    assert_eq!(from_str("{}").unwrap(), mk_object(&[]));
    assert_eq!(from_str("{\"a\": 3}").unwrap(), mk_object(&[("a".to_string(), U64(3))]));
    assert_eq!(
        from_str("{ \"a\": null, \"b\" : true }").unwrap(),
        mk_object(&[("a".to_string(), Null), ("b".to_string(), Boolean(true))])
    );
    // Leading/trailing newlines around the whole document are tolerated.
    assert_eq!(
        from_str("\n{ \"a\": null, \"b\" : true }\n").unwrap(),
        mk_object(&[("a".to_string(), Null), ("b".to_string(), Boolean(true))])
    );
    assert_eq!(
        from_str("{\"a\" : 1.0 ,\"b\": [ true ]}").unwrap(),
        mk_object(&[("a".to_string(), F64(1.0)), ("b".to_string(), Array(vec![Boolean(true)]))])
    );
    // The `\` line continuations below splice the source into one long JSON
    // document (an escaped newline also eats the following indentation).
    assert_eq!(
        from_str(
            "{\
            \"a\": 1.0, \
            \"b\": [\
            true,\
            \"foo\\nbar\", \
            { \"c\": {\"d\": null} } \
            ]\
            }"
        )
        .unwrap(),
        mk_object(&[
            ("a".to_string(), F64(1.0)),
            (
                "b".to_string(),
                Array(vec![
                    Boolean(true),
                    String("foo\nbar".to_string()),
                    mk_object(&[("c".to_string(), mk_object(&[("d".to_string(), Null)]))])
                ])
            )
        ])
    );
}
// Error positions account for newlines: the unterminated object below ends
// on source line 3, and the error column points one past the final quote.
#[test]
fn test_multiline_errors() {
    assert_eq!(from_str("{\n \"foo\":\n \"bar\""), Err(SyntaxError(EOFWhileParsingObject, 3, 8)));
}
/// `find` looks up a top-level object key and returns a reference to its value.
#[test]
fn test_find() {
    let doc = from_str("{\"dog\" : \"cat\"}").unwrap();
    let value = doc.find("dog").expect("key `dog` should be present");
    assert_eq!(value.as_string(), Some("cat"));
}
/// `find_path` walks nested objects along the given sequence of keys.
#[test]
fn test_find_path() {
    let doc = from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap();
    let leaf = doc.find_path(&["dog", "cat", "mouse"]).expect("path should resolve");
    assert_eq!(leaf.as_string(), Some("cheese"));
}
/// `search` performs a deep lookup for a key anywhere in the value tree.
#[test]
fn test_search() {
    let doc = from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap();
    assert_eq!(doc.search("mouse").and_then(|j| j.as_string()), Some("cheese"));
}
/// Indexing by key reaches into an object, and indexing by position reaches
/// into the array stored under that key.
#[test]
fn test_index() {
    let doc = from_str("{\"animals\":[\"dog\",\"cat\",\"mouse\"]}").unwrap();
    let animals = &doc["animals"];
    for (i, expected) in ["dog", "cat", "mouse"].iter().enumerate() {
        assert_eq!(animals[i].as_string().unwrap(), *expected);
    }
}
/// An empty object literal is classified as an object.
#[test]
fn test_is_object() {
    assert!(from_str("{}").unwrap().is_object());
}
/// `as_object` yields `Some(..)` when the value is an object.
#[test]
fn test_as_object() {
    let doc = from_str("{}").unwrap();
    assert!(doc.as_object().is_some());
}
/// An array literal is classified as an array.
#[test]
fn test_is_array() {
    assert!(from_str("[1, 2, 3]").unwrap().is_array());
}
/// `as_array` exposes the parsed elements; three literals give length 3.
#[test]
fn test_as_array() {
    let doc = from_str("[1, 2, 3]").unwrap();
    assert_eq!(doc.as_array().map(|elems| elems.len()), Some(3));
}
/// A string literal is classified as a string.
#[test]
fn test_is_string() {
    assert!(from_str("\"dog\"").unwrap().is_string());
}
/// `as_string` borrows the string contents out of a string value.
#[test]
fn test_as_string() {
    let doc = from_str("\"dog\"").unwrap();
    assert_eq!(doc.as_string(), Some("dog"));
}
/// Any numeric literal (here a plain integer) is classified as a number.
#[test]
fn test_is_number() {
    assert!(from_str("12").unwrap().is_number());
}
/// Only values parsed into the `I64` variant report as i64: negative
/// integers do; non-negative integers land in `U64` and floats in `F64`.
#[test]
fn test_is_i64() {
    for (src, expected) in [("-12", true), ("12", false), ("12.0", false)] {
        assert_eq!(from_str(src).unwrap().is_i64(), expected, "source: {}", src);
    }
}
/// Only values parsed into the `U64` variant report as u64: non-negative
/// integers do; negative integers land in `I64` and floats in `F64`.
#[test]
fn test_is_u64() {
    for (src, expected) in [("12", true), ("-12", false), ("12.0", false)] {
        assert_eq!(from_str(src).unwrap().is_u64(), expected, "source: {}", src);
    }
}
/// Only values with a fractional literal parse into `F64`; integer literals
/// of either sign do not report as f64.
#[test]
fn test_is_f64() {
    for (src, expected) in [("12", false), ("-12", false), ("12.0", true), ("-12.0", true)] {
        assert_eq!(from_str(src).unwrap().is_f64(), expected, "source: {}", src);
    }
}
/// `as_i64` extracts the signed integer from an `I64` value.
#[test]
fn test_as_i64() {
    assert_eq!(from_str("-12").unwrap().as_i64(), Some(-12));
}
/// `as_u64` extracts the unsigned integer from a `U64` value.
#[test]
fn test_as_u64() {
    assert_eq!(from_str("12").unwrap().as_u64(), Some(12));
}
/// `as_f64` extracts the float from an `F64` value.
#[test]
fn test_as_f64() {
    assert_eq!(from_str("12.0").unwrap().as_f64(), Some(12f64));
}
/// A boolean literal is classified as a boolean.
#[test]
fn test_is_boolean() {
    assert!(from_str("false").unwrap().is_boolean());
}
/// `as_boolean` extracts the bool from a boolean value.
#[test]
fn test_as_boolean() {
    assert_eq!(from_str("false").unwrap().as_boolean(), Some(false));
}
/// The `null` literal is classified as null.
#[test]
fn test_is_null() {
    assert!(from_str("null").unwrap().is_null());
}
/// `as_null` maps a null value to `Some(())`.
#[test]
fn test_as_null() {
    assert_eq!(from_str("null").unwrap().as_null(), Some(()));
}
/// A `HashMap` with a numeric key must encode (compact form) to JSON that
/// parses back cleanly: non-string keys are stringified so they can serve as
/// object keys.
///
/// Fix: this test was a byte-for-byte duplicate of
/// `test_prettyencode_hashmap_with_numeric_key` — both called
/// `as_pretty_json`, so the compact encoder was never exercised. It now uses
/// `json::as_json`, matching its name.
#[test]
fn test_encode_hashmap_with_numeric_key() {
    use std::collections::HashMap;
    use std::str::from_utf8;
    let mut hm: HashMap<usize, bool> = HashMap::new();
    hm.insert(1, true);
    let mut mem_buf = Vec::new();
    write!(&mut mem_buf, "{}", json::as_json(&hm)).unwrap();
    let json_str = from_utf8(&mem_buf[..]).unwrap();
    // Round-trip: the encoded text must parse without error.
    match from_str(json_str) {
        Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
        _ => {} // it parsed and we are good to go
    }
}
/// A `HashMap` with a numeric key must pretty-print to JSON that parses
/// back cleanly (numeric keys are stringified into object keys).
#[test]
fn test_prettyencode_hashmap_with_numeric_key() {
    use std::collections::HashMap;
    use std::str::from_utf8;
    let mut map: HashMap<usize, bool> = HashMap::new();
    map.insert(1, true);
    let mut buf = Vec::new();
    write!(&mut buf, "{}", json::as_pretty_json(&map)).unwrap();
    let text = from_utf8(&buf[..]).unwrap();
    // Round-trip: the pretty-printed text must parse without error.
    if from_str(text).is_err() {
        panic!("Unable to parse json_str: {:?}", text);
    }
}
/// `as_pretty_json(..).indent(n)` must indent each nesting level by exactly
/// `n` spaces, for several widths, and the output must stay parseable.
///
/// Fix: the final validity check used `.ok().expect(..)`, which converts the
/// `Result` to an `Option` and throws away the parse error; `.expect(..)` on
/// the `Result` itself reports the error in the panic message.
#[test]
fn test_prettyencoder_indent_level_param() {
    use std::collections::BTreeMap;
    use std::str::from_utf8;

    let mut tree = BTreeMap::new();
    tree.insert("hello".to_string(), String("guten tag".to_string()));
    tree.insert("goodbye".to_string(), String("sayonara".to_string()));

    let json = Array(
        // The following layout below should look a lot like
        // the pretty-printed JSON (indent * x)
        vec![
            // 0x
            String("greetings".to_string()), // 1x
            Object(tree),                    // 1x + 2x + 2x + 1x
        ], // 0x
        // End JSON array (7 lines)
    );

    // Helper function for counting indents
    fn indents(source: &str) -> usize {
        let trimmed = source.trim_start_matches(' ');
        source.len() - trimmed.len()
    }

    // Test up to 4 spaces of indents (more?)
    for i in 0..4 {
        let mut writer = Vec::new();
        write!(&mut writer, "{}", json::as_pretty_json(&json).indent(i)).unwrap();
        let printed = from_utf8(&writer[..]).unwrap();

        // Check for indents at each line; the `k * i` forms mirror the
        // nesting depth `k` of each line.
        let lines: Vec<&str> = printed.lines().collect();
        assert_eq!(lines.len(), 7); // JSON should be 7 lines

        assert_eq!(indents(lines[0]), 0 * i); // [
        assert_eq!(indents(lines[1]), 1 * i); // "greetings",
        assert_eq!(indents(lines[2]), 1 * i); // {
        assert_eq!(indents(lines[3]), 2 * i); // "hello": "guten tag",
        assert_eq!(indents(lines[4]), 2 * i); // "goodbye": "sayonara"
        assert_eq!(indents(lines[5]), 1 * i); // },
        assert_eq!(indents(lines[6]), 0 * i); // ]

        // Finally, test that the pretty-printed JSON is valid.
        from_str(printed).expect("Pretty-printed JSON is invalid!");
    }
}
/// A unit enum variant used as a map key encodes as its variant name.
#[test]
fn test_hashmap_with_enum_key() {
    use std::collections::HashMap;

    #[derive(Encodable, Eq, Hash, PartialEq, Debug)]
    enum Enum {
        Foo,
        #[allow(dead_code)]
        Bar,
    }

    let mut map = HashMap::new();
    map.insert(Enum::Foo, 0);
    let encoded = json::encode(&map).unwrap();
    assert_eq!(encoded.as_str(), r#"{"Foo":0}"#);
}
/// Drives `Parser` over `src` and checks that every produced event, and the
/// parser's key/index stack at that event, matches `expected` in order.
///
/// Fix: the original looped until the parser returned `None` and never
/// compared the event count against `expected.len()`, so a parser that
/// produced only a *prefix* of the expected events passed silently. We now
/// assert all expected events were consumed.
fn assert_stream_equal(src: &str, expected: Vec<(JsonEvent, Vec<StackElement<'_>>)>) {
    let mut parser = Parser::new(src.chars());
    let mut seen = 0;
    while let Some(evt) = parser.next() {
        let (ref expected_evt, ref expected_stack) = expected[seen];
        if !parser.stack().is_equal_to(expected_stack) {
            panic!("Parser stack is not equal to {:?}", expected_stack);
        }
        assert_eq!(&evt, expected_evt);
        seen += 1;
    }
    // The parser must yield *all* expected events, not just a prefix.
    assert_eq!(seen, expected.len(), "parser stopped after {} of {} expected events", seen, expected.len());
}
// Full streaming pass over a mixed document. Each tuple in the expectation
// list is (event, parser stack at the moment the event is produced): object
// members are tracked by `Key`, array elements by `Index`.
#[test]
fn test_streaming_parser() {
    assert_stream_equal(
        r#"{ "foo":"bar", "array" : [0, 1, 2, 3, 4, 5], "idents":[null,true,false]}"#,
        vec![
            (ObjectStart, vec![]),
            (StringValue("bar".to_string()), vec![StackElement::Key("foo")]),
            (ArrayStart, vec![StackElement::Key("array")]),
            (U64Value(0), vec![StackElement::Key("array"), StackElement::Index(0)]),
            (U64Value(1), vec![StackElement::Key("array"), StackElement::Index(1)]),
            (U64Value(2), vec![StackElement::Key("array"), StackElement::Index(2)]),
            (U64Value(3), vec![StackElement::Key("array"), StackElement::Index(3)]),
            (U64Value(4), vec![StackElement::Key("array"), StackElement::Index(4)]),
            (U64Value(5), vec![StackElement::Key("array"), StackElement::Index(5)]),
            (ArrayEnd, vec![StackElement::Key("array")]),
            (ArrayStart, vec![StackElement::Key("idents")]),
            (NullValue, vec![StackElement::Key("idents"), StackElement::Index(0)]),
            (BooleanValue(true), vec![StackElement::Key("idents"), StackElement::Index(1)]),
            (BooleanValue(false), vec![StackElement::Key("idents"), StackElement::Index(2)]),
            (ArrayEnd, vec![StackElement::Key("idents")]),
            (ObjectEnd, vec![]),
        ],
    );
}
/// Runs the streaming parser over `src` to exhaustion and returns the last
/// event it produced (`NullValue` if it produced none).
fn last_event(src: &str) -> JsonEvent {
    let mut parser = Parser::new(src.chars());
    let mut latest = NullValue;
    while let Some(evt) = parser.next() {
        latest = evt;
    }
    latest
}
// Streaming object parsing: truncated inputs end with a positioned `Error`
// event (via `last_event`); well-formed objects produce the exact event
// sequence with the parser's key/index stack checked at every step.
#[test]
fn test_read_object_streaming() {
    assert_eq!(last_event("{ "), Error(SyntaxError(EOFWhileParsingObject, 1, 3)));
    assert_eq!(last_event("{1"), Error(SyntaxError(KeyMustBeAString, 1, 2)));
    assert_eq!(last_event("{ \"a\""), Error(SyntaxError(EOFWhileParsingObject, 1, 6)));
    assert_eq!(last_event("{\"a\""), Error(SyntaxError(EOFWhileParsingObject, 1, 5)));
    assert_eq!(last_event("{\"a\" "), Error(SyntaxError(EOFWhileParsingObject, 1, 6)));
    assert_eq!(last_event("{\"a\" 1"), Error(SyntaxError(ExpectedColon, 1, 6)));
    assert_eq!(last_event("{\"a\":"), Error(SyntaxError(EOFWhileParsingValue, 1, 6)));
    assert_eq!(last_event("{\"a\":1"), Error(SyntaxError(EOFWhileParsingObject, 1, 7)));
    assert_eq!(last_event("{\"a\":1 1"), Error(SyntaxError(InvalidSyntax, 1, 8)));
    assert_eq!(last_event("{\"a\":1,"), Error(SyntaxError(EOFWhileParsingObject, 1, 8)));
    // Unlike the tree parser tests above, a trailing comma is reported as
    // `TrailingComma` here because the closing brace is actually seen.
    assert_eq!(last_event("{\"a\":1,}"), Error(SyntaxError(TrailingComma, 1, 8)));
    assert_stream_equal("{}", vec![(ObjectStart, vec![]), (ObjectEnd, vec![])]);
    assert_stream_equal(
        "{\"a\": 3}",
        vec![
            (ObjectStart, vec![]),
            (U64Value(3), vec![StackElement::Key("a")]),
            (ObjectEnd, vec![]),
        ],
    );
    assert_stream_equal(
        "{ \"a\": null, \"b\" : true }",
        vec![
            (ObjectStart, vec![]),
            (NullValue, vec![StackElement::Key("a")]),
            (BooleanValue(true), vec![StackElement::Key("b")]),
            (ObjectEnd, vec![]),
        ],
    );
    assert_stream_equal(
        "{\"a\" : 1.0 ,\"b\": [ true ]}",
        vec![
            (ObjectStart, vec![]),
            (F64Value(1.0), vec![StackElement::Key("a")]),
            (ArrayStart, vec![StackElement::Key("b")]),
            (BooleanValue(true), vec![StackElement::Key("b"), StackElement::Index(0)]),
            (ArrayEnd, vec![StackElement::Key("b")]),
            (ObjectEnd, vec![]),
        ],
    );
    // Deep nesting: the stack records the full path (key, index, key, ...)
    // down to each emitted value.
    assert_stream_equal(
        r#"{
            "a": 1.0,
            "b": [
                true,
                "foo\nbar",
                { "c": {"d": null} }
            ]
        }"#,
        vec![
            (ObjectStart, vec![]),
            (F64Value(1.0), vec![StackElement::Key("a")]),
            (ArrayStart, vec![StackElement::Key("b")]),
            (BooleanValue(true), vec![StackElement::Key("b"), StackElement::Index(0)]),
            (
                StringValue("foo\nbar".to_string()),
                vec![StackElement::Key("b"), StackElement::Index(1)],
            ),
            (ObjectStart, vec![StackElement::Key("b"), StackElement::Index(2)]),
            (
                ObjectStart,
                vec![StackElement::Key("b"), StackElement::Index(2), StackElement::Key("c")],
            ),
            (
                NullValue,
                vec![
                    StackElement::Key("b"),
                    StackElement::Index(2),
                    StackElement::Key("c"),
                    StackElement::Key("d"),
                ],
            ),
            (
                ObjectEnd,
                vec![StackElement::Key("b"), StackElement::Index(2), StackElement::Key("c")],
            ),
            (ObjectEnd, vec![StackElement::Key("b"), StackElement::Index(2)]),
            (ArrayEnd, vec![StackElement::Key("b")]),
            (ObjectEnd, vec![]),
        ],
    );
}
// Streaming array parsing: well-formed arrays (empty, single-element,
// multi-element, nested) produce the exact event sequence with the index
// stack checked at each element; truncated/malformed inputs error.
#[test]
fn test_read_array_streaming() {
    assert_stream_equal("[]", vec![(ArrayStart, vec![]), (ArrayEnd, vec![])]);
    assert_stream_equal("[ ]", vec![(ArrayStart, vec![]), (ArrayEnd, vec![])]);
    assert_stream_equal(
        "[true]",
        vec![
            (ArrayStart, vec![]),
            (BooleanValue(true), vec![StackElement::Index(0)]),
            (ArrayEnd, vec![]),
        ],
    );
    assert_stream_equal(
        "[ false ]",
        vec![
            (ArrayStart, vec![]),
            (BooleanValue(false), vec![StackElement::Index(0)]),
            (ArrayEnd, vec![]),
        ],
    );
    assert_stream_equal(
        "[null]",
        vec![(ArrayStart, vec![]), (NullValue, vec![StackElement::Index(0)]), (ArrayEnd, vec![])],
    );
    assert_stream_equal(
        "[3, 1]",
        vec![
            (ArrayStart, vec![]),
            (U64Value(3), vec![StackElement::Index(0)]),
            (U64Value(1), vec![StackElement::Index(1)]),
            (ArrayEnd, vec![]),
        ],
    );
    assert_stream_equal(
        "\n[3, 2]\n",
        vec![
            (ArrayStart, vec![]),
            (U64Value(3), vec![StackElement::Index(0)]),
            (U64Value(2), vec![StackElement::Index(1)]),
            (ArrayEnd, vec![]),
        ],
    );
    // Nested array: the inner elements carry both the outer and inner index.
    assert_stream_equal(
        "[2, [4, 1]]",
        vec![
            (ArrayStart, vec![]),
            (U64Value(2), vec![StackElement::Index(0)]),
            (ArrayStart, vec![StackElement::Index(1)]),
            (U64Value(4), vec![StackElement::Index(1), StackElement::Index(0)]),
            (U64Value(1), vec![StackElement::Index(1), StackElement::Index(1)]),
            (ArrayEnd, vec![StackElement::Index(1)]),
            (ArrayEnd, vec![]),
        ],
    );
    // Error cases: the streaming parser and the tree parser agree on the
    // position of each failure.
    assert_eq!(last_event("["), Error(SyntaxError(EOFWhileParsingValue, 1, 2)));
    assert_eq!(from_str("["), Err(SyntaxError(EOFWhileParsingValue, 1, 2)));
    assert_eq!(from_str("[1"), Err(SyntaxError(EOFWhileParsingArray, 1, 3)));
    assert_eq!(from_str("[1,"), Err(SyntaxError(EOFWhileParsingValue, 1, 4)));
    assert_eq!(from_str("[1,]"), Err(SyntaxError(InvalidSyntax, 1, 4)));
    assert_eq!(from_str("[6 7]"), Err(SyntaxError(InvalidSyntax, 1, 4)));
}
/// Any non-whitespace text after a complete top-level value is reported as
/// `TrailingCharacters` at the column of the first extra character.
#[test]
fn test_trailing_characters_streaming() {
    let cases = [("nulla", 5), ("truea", 5), ("falsea", 6), ("1a", 2), ("[]a", 3), ("{}a", 3)];
    for (src, col) in cases {
        assert_eq!(last_event(src), Error(SyntaxError(TrailingCharacters, 1, col)));
    }
}
// The bare literals `null` / `true` / `false` stream as single events;
// truncated or misspelled identifiers produce `InvalidSyntax` at the first
// wrong (or missing) character.
#[test]
fn test_read_identifiers_streaming() {
    assert_eq!(Parser::new("null".chars()).next(), Some(NullValue));
    assert_eq!(Parser::new("true".chars()).next(), Some(BooleanValue(true)));
    assert_eq!(Parser::new("false".chars()).next(), Some(BooleanValue(false)));
    assert_eq!(last_event("n"), Error(SyntaxError(InvalidSyntax, 1, 2)));
    assert_eq!(last_event("nul"), Error(SyntaxError(InvalidSyntax, 1, 4)));
    assert_eq!(last_event("t"), Error(SyntaxError(InvalidSyntax, 1, 2)));
    assert_eq!(last_event("truz"), Error(SyntaxError(InvalidSyntax, 1, 4)));
    assert_eq!(last_event("f"), Error(SyntaxError(InvalidSyntax, 1, 2)));
    assert_eq!(last_event("faz"), Error(SyntaxError(InvalidSyntax, 1, 3)));
}
// `ToJson` conversions across the primitive and container impls:
// signed integers -> I64, unsigned -> U64, floats -> F64 (with non-finite
// values mapping to Null), unit -> Null, tuples/slices/vecs -> Array,
// BTreeMap/HashMap -> Object, Option -> inner value or Null.
#[test]
fn test_to_json() {
    use json::ToJson;
    use std::collections::{BTreeMap, HashMap};

    let array2 = Array(vec![U64(1), U64(2)]);
    let array3 = Array(vec![U64(1), U64(2), U64(3)]);
    let object = {
        let mut tree_map = BTreeMap::new();
        tree_map.insert("a".to_string(), U64(1));
        tree_map.insert("b".to_string(), U64(2));
        Object(tree_map)
    };

    // `Json` itself converts by cloning.
    assert_eq!(array2.to_json(), array2);
    assert_eq!(object.to_json(), object);
    assert_eq!(3_isize.to_json(), I64(3));
    assert_eq!(4_i8.to_json(), I64(4));
    assert_eq!(5_i16.to_json(), I64(5));
    assert_eq!(6_i32.to_json(), I64(6));
    assert_eq!(7_i64.to_json(), I64(7));
    assert_eq!(8_usize.to_json(), U64(8));
    assert_eq!(9_u8.to_json(), U64(9));
    assert_eq!(10_u16.to_json(), U64(10));
    assert_eq!(11_u32.to_json(), U64(11));
    assert_eq!(12_u64.to_json(), U64(12));
    assert_eq!(13.0_f32.to_json(), F64(13.0_f64));
    assert_eq!(14.0_f64.to_json(), F64(14.0_f64));
    assert_eq!(().to_json(), Null);
    // Non-finite floats have no JSON representation and become Null.
    assert_eq!(f32::INFINITY.to_json(), Null);
    assert_eq!(f64::NAN.to_json(), Null);
    assert_eq!(true.to_json(), Boolean(true));
    assert_eq!(false.to_json(), Boolean(false));
    assert_eq!("abc".to_json(), String("abc".to_string()));
    assert_eq!("abc".to_string().to_json(), String("abc".to_string()));
    assert_eq!((1_usize, 2_usize).to_json(), array2);
    assert_eq!((1_usize, 2_usize, 3_usize).to_json(), array3);
    assert_eq!([1_usize, 2_usize].to_json(), array2);
    assert_eq!((&[1_usize, 2_usize, 3_usize]).to_json(), array3);
    assert_eq!((vec![1_usize, 2_usize]).to_json(), array2);
    assert_eq!(vec![1_usize, 2_usize, 3_usize].to_json(), array3);
    let mut tree_map = BTreeMap::new();
    tree_map.insert("a".to_string(), 1 as usize);
    tree_map.insert("b".to_string(), 2);
    assert_eq!(tree_map.to_json(), object);
    let mut hash_map = HashMap::new();
    hash_map.insert("a".to_string(), 1 as usize);
    hash_map.insert("b".to_string(), 2);
    assert_eq!(hash_map.to_json(), object);
    assert_eq!(Some(15).to_json(), I64(15));
    assert_eq!(Some(15 as usize).to_json(), U64(15));
    assert_eq!(None::<isize>.to_json(), Null);
}
/// Encoding a map whose key type does not serialize to a plain string or
/// number must fail with `EncoderError::BadHashmapKey`.
#[test]
fn test_encode_hashmap_with_arbitrary_key() {
    use std::collections::HashMap;

    #[derive(PartialEq, Eq, Hash, Encodable)]
    struct ArbitraryType(usize);

    let mut map: HashMap<ArbitraryType, bool> = HashMap::new();
    map.insert(ArbitraryType(1), true);
    let mut out = string::String::new();
    let mut encoder = Encoder::new(&mut out);
    let err = map.encode(&mut encoder).unwrap_err();
    assert!(matches!(err, EncoderError::BadHashmapKey), "expected bad hash map key");
}
-4
View File
@@ -1207,10 +1207,6 @@ pub(crate) fn parse_branch_protection(
assert_incr_state: Option<String> = (None, parse_opt_string, [UNTRACKED],
"assert that the incremental cache is in given state: \
either `loaded` or `not-loaded`."),
ast_json: bool = (false, parse_bool, [UNTRACKED],
"print the AST as JSON and halt (default: no)"),
ast_json_noexpand: bool = (false, parse_bool, [UNTRACKED],
"print the pre-expansion AST as JSON and halt (default: no)"),
binary_dep_depinfo: bool = (false, parse_bool, [TRACKED],
"include artifacts (sysroot, crate dependencies) used during compilation in dep-info \
(default: no)"),
+2 -5
View File
@@ -307,11 +307,8 @@ pub fn is_top_level_module(self) -> bool {
impl<E: Encoder> Encodable<E> for DefId {
default fn encode(&self, s: &mut E) -> Result<(), E::Error> {
s.emit_struct(false, |s| {
s.emit_struct_field("krate", true, |s| self.krate.encode(s))?;
s.emit_struct_field("index", false, |s| self.index.encode(s))
})
self.krate.encode(s)?;
self.index.encode(s)
}
}
+75 -80
View File
@@ -196,24 +196,23 @@ fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
// an added assert statement
impl<S: Encoder> Encodable<S> for RealFileName {
fn encode(&self, encoder: &mut S) -> Result<(), S::Error> {
encoder.emit_enum(|encoder| match *self {
RealFileName::LocalPath(ref local_path) => {
encoder.emit_enum_variant("LocalPath", 0, 1, |encoder| {
encoder.emit_enum_variant_arg(true, |encoder| local_path.encode(encoder))?;
Ok(())
match *self {
RealFileName::LocalPath(ref local_path) => encoder.emit_enum_variant(0, |encoder| {
Ok({
local_path.encode(encoder)?;
})
}
}),
RealFileName::Remapped { ref local_path, ref virtual_name } => encoder
.emit_enum_variant("Remapped", 1, 2, |encoder| {
.emit_enum_variant(1, |encoder| {
// For privacy and build reproducibility, we must not embed host-dependant path in artifacts
// if they have been remapped by --remap-path-prefix
assert!(local_path.is_none());
encoder.emit_enum_variant_arg(true, |encoder| local_path.encode(encoder))?;
encoder.emit_enum_variant_arg(false, |encoder| virtual_name.encode(encoder))?;
local_path.encode(encoder)?;
virtual_name.encode(encoder)?;
Ok(())
}),
})
}
}
}
@@ -950,10 +949,8 @@ fn default() -> Self {
impl<E: Encoder> Encodable<E> for Span {
default fn encode(&self, s: &mut E) -> Result<(), E::Error> {
let span = self.data();
s.emit_struct(false, |s| {
s.emit_struct_field("lo", true, |s| span.lo.encode(s))?;
s.emit_struct_field("hi", false, |s| span.hi.encode(s))
})
span.lo.encode(s)?;
span.hi.encode(s)
}
}
impl<D: Decoder> Decodable<D> for Span {
@@ -1302,79 +1299,77 @@ pub struct SourceFile {
impl<S: Encoder> Encodable<S> for SourceFile {
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_struct(false, |s| {
s.emit_struct_field("name", true, |s| self.name.encode(s))?;
s.emit_struct_field("src_hash", false, |s| self.src_hash.encode(s))?;
s.emit_struct_field("start_pos", false, |s| self.start_pos.encode(s))?;
s.emit_struct_field("end_pos", false, |s| self.end_pos.encode(s))?;
s.emit_struct_field("lines", false, |s| {
// We are always in `Lines` form by the time we reach here.
assert!(self.lines.borrow().is_lines());
self.lines(|lines| {
// Store the length.
s.emit_u32(lines.len() as u32)?;
self.name.encode(s)?;
self.src_hash.encode(s)?;
self.start_pos.encode(s)?;
self.end_pos.encode(s)?;
// Compute and store the difference list.
if lines.len() != 0 {
let max_line_length = if lines.len() == 1 {
0
} else {
lines
.array_windows()
.map(|&[fst, snd]| snd - fst)
.map(|bp| bp.to_usize())
.max()
.unwrap()
};
// We are always in `Lines` form by the time we reach here.
assert!(self.lines.borrow().is_lines());
self.lines(|lines| {
// Store the length.
s.emit_u32(lines.len() as u32)?;
let bytes_per_diff: usize = match max_line_length {
0..=0xFF => 1,
0x100..=0xFFFF => 2,
_ => 4,
};
// Compute and store the difference list.
if lines.len() != 0 {
let max_line_length = if lines.len() == 1 {
0
} else {
lines
.array_windows()
.map(|&[fst, snd]| snd - fst)
.map(|bp| bp.to_usize())
.max()
.unwrap()
};
// Encode the number of bytes used per diff.
s.emit_u8(bytes_per_diff as u8)?;
let bytes_per_diff: usize = match max_line_length {
0..=0xFF => 1,
0x100..=0xFFFF => 2,
_ => 4,
};
// Encode the first element.
lines[0].encode(s)?;
// Encode the number of bytes used per diff.
s.emit_u8(bytes_per_diff as u8)?;
// Encode the difference list.
let diff_iter = lines.array_windows().map(|&[fst, snd]| snd - fst);
let num_diffs = lines.len() - 1;
let mut raw_diffs;
match bytes_per_diff {
1 => {
raw_diffs = Vec::with_capacity(num_diffs);
for diff in diff_iter {
raw_diffs.push(diff.0 as u8);
}
}
2 => {
raw_diffs = Vec::with_capacity(bytes_per_diff * num_diffs);
for diff in diff_iter {
raw_diffs.extend_from_slice(&(diff.0 as u16).to_le_bytes());
}
}
4 => {
raw_diffs = Vec::with_capacity(bytes_per_diff * num_diffs);
for diff in diff_iter {
raw_diffs.extend_from_slice(&(diff.0 as u32).to_le_bytes());
}
}
_ => unreachable!(),
// Encode the first element.
lines[0].encode(s)?;
// Encode the difference list.
let diff_iter = lines.array_windows().map(|&[fst, snd]| snd - fst);
let num_diffs = lines.len() - 1;
let mut raw_diffs;
match bytes_per_diff {
1 => {
raw_diffs = Vec::with_capacity(num_diffs);
for diff in diff_iter {
raw_diffs.push(diff.0 as u8);
}
s.emit_raw_bytes(&raw_diffs)?;
}
Ok(())
})
})?;
s.emit_struct_field("multibyte_chars", false, |s| self.multibyte_chars.encode(s))?;
s.emit_struct_field("non_narrow_chars", false, |s| self.non_narrow_chars.encode(s))?;
s.emit_struct_field("name_hash", false, |s| self.name_hash.encode(s))?;
s.emit_struct_field("normalized_pos", false, |s| self.normalized_pos.encode(s))?;
s.emit_struct_field("cnum", false, |s| self.cnum.encode(s))
})
2 => {
raw_diffs = Vec::with_capacity(bytes_per_diff * num_diffs);
for diff in diff_iter {
raw_diffs.extend_from_slice(&(diff.0 as u16).to_le_bytes());
}
}
4 => {
raw_diffs = Vec::with_capacity(bytes_per_diff * num_diffs);
for diff in diff_iter {
raw_diffs.extend_from_slice(&(diff.0 as u32).to_le_bytes());
}
}
_ => unreachable!(),
}
s.emit_raw_bytes(&raw_diffs)?;
}
Ok(())
})?;
self.multibyte_chars.encode(s)?;
self.non_narrow_chars.encode(s)?;
self.name_hash.encode(s)?;
self.normalized_pos.encode(s)?;
self.cnum.encode(s)
}
}
+1
View File
@@ -6,6 +6,7 @@ edition = "2021"
[dependencies]
bitflags = "1.2.1"
tracing = "0.1"
serde_json = "1.0.59"
rustc_data_structures = { path = "../rustc_data_structures" }
rustc_macros = { path = "../rustc_macros" }
rustc_serialize = { path = "../rustc_serialize" }
+4 -3
View File
@@ -1,6 +1,7 @@
pub use Integer::*;
pub use Primitive::*;
use crate::json::{Json, ToJson};
use crate::spec::Target;
use std::convert::{TryFrom, TryInto};
@@ -13,7 +14,6 @@
use rustc_data_structures::intern::Interned;
use rustc_index::vec::{Idx, IndexVec};
use rustc_macros::HashStable_Generic;
use rustc_serialize::json::{Json, ToJson};
pub mod call;
@@ -166,7 +166,8 @@ pub fn parse(target: &Target) -> Result<TargetDataLayout, String> {
));
}
if dl.pointer_size.bits() != target.pointer_width.into() {
let target_pointer_width: u64 = target.pointer_width.into();
if dl.pointer_size.bits() != target_pointer_width {
return Err(format!(
"inconsistent target specification: \"data-layout\" claims \
pointers are {}-bit, while \"target-pointer-width\" is `{}`",
@@ -574,7 +575,7 @@ pub fn restrict_for_offset(self, offset: Size) -> Align {
}
/// A pair of alignments, ABI-mandated and preferred.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Encodable, Decodable)]
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[derive(HashStable_Generic)]
pub struct AbiAndPrefAlign {
pub abi: Align,
+91
View File
@@ -0,0 +1,91 @@
use std::borrow::Cow;
use std::collections::BTreeMap;
pub use serde_json::Value as Json;
use serde_json::{Map, Number};
pub trait ToJson {
fn to_json(&self) -> Json;
}
impl ToJson for Json {
fn to_json(&self) -> Json {
self.clone()
}
}
macro_rules! to_json_impl_num {
($($t:ty), +) => (
$(impl ToJson for $t {
fn to_json(&self) -> Json {
Json::Number(Number::from(*self))
}
})+
)
}
to_json_impl_num! { isize, i8, i16, i32, i64, usize, u8, u16, u32, u64 }
impl ToJson for bool {
fn to_json(&self) -> Json {
Json::Bool(*self)
}
}
impl ToJson for str {
fn to_json(&self) -> Json {
Json::String(self.to_owned())
}
}
impl ToJson for String {
fn to_json(&self) -> Json {
Json::String(self.to_owned())
}
}
impl<'a> ToJson for Cow<'a, str> {
fn to_json(&self) -> Json {
Json::String(self.to_string())
}
}
impl<A: ToJson> ToJson for [A] {
fn to_json(&self) -> Json {
Json::Array(self.iter().map(|elt| elt.to_json()).collect())
}
}
impl<A: ToJson> ToJson for Vec<A> {
fn to_json(&self) -> Json {
Json::Array(self.iter().map(|elt| elt.to_json()).collect())
}
}
impl<'a, A: ToJson> ToJson for Cow<'a, [A]>
where
[A]: ToOwned,
{
fn to_json(&self) -> Json {
Json::Array(self.iter().map(|elt| elt.to_json()).collect())
}
}
impl<T: ToString, A: ToJson> ToJson for BTreeMap<T, A> {
fn to_json(&self) -> Json {
let mut d = Map::new();
for (key, value) in self {
d.insert(key.to_string(), value.to_json());
}
Json::Object(d)
}
}
impl<A: ToJson> ToJson for Option<A> {
fn to_json(&self) -> Json {
match *self {
None => Json::Null,
Some(ref value) => value.to_json(),
}
}
}
+1
View File
@@ -28,6 +28,7 @@
pub mod abi;
pub mod asm;
pub mod json;
pub mod spec;
#[cfg(test)]
@@ -40,8 +40,8 @@
//! but not gcc's. As a result rustc cannot link with C++ static libraries (#36710)
//! when linking in self-contained mode.
use crate::json::{Json, ToJson};
use crate::spec::LinkOutputKind;
use rustc_serialize::json::{Json, ToJson};
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::str::FromStr;
+61 -56
View File
@@ -35,11 +35,12 @@
//! to the list specified by the target, rather than replace.
use crate::abi::Endian;
use crate::json::{Json, ToJson};
use crate::spec::abi::{lookup as lookup_abi, Abi};
use crate::spec::crt_objects::{CrtObjects, CrtObjectsFallback};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_serialize::json::{Json, ToJson};
use rustc_span::symbol::{sym, Symbol};
use serde_json::Value;
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::convert::TryFrom;
@@ -211,7 +212,7 @@ fn to_json(&self) -> Json {
}
}
#[derive(Clone, Copy, Debug, PartialEq, Hash, Encodable, Decodable)]
#[derive(Clone, Copy, Debug, PartialEq, Hash)]
pub enum RelroLevel {
Full,
Partial,
@@ -255,7 +256,7 @@ fn to_json(&self) -> Json {
}
}
#[derive(Clone, Copy, Debug, PartialEq, Hash, Encodable, Decodable)]
#[derive(Clone, Copy, Debug, PartialEq, Hash)]
pub enum MergeFunctions {
Disabled,
Trampolines,
@@ -548,7 +549,7 @@ fn from_json(json: &Json) -> Result<Self, String> {
let object = json.as_object().ok_or_else(|| "expected a JSON object")?;
let kind = object
.get("kind")
.and_then(|o| o.as_string())
.and_then(|o| o.as_str())
.ok_or_else(|| "expected `kind` to be a string")?;
match kind {
"none" => Ok(StackProbeType::None),
@@ -592,11 +593,11 @@ fn to_json(&self) -> Json {
StackProbeType::Call => {
[(String::from("kind"), "call".to_json())].into_iter().collect()
}
StackProbeType::InlineOrCall { min_llvm_version_for_inline } => [
StackProbeType::InlineOrCall { min_llvm_version_for_inline: (maj, min, patch) } => [
(String::from("kind"), "inline-or-call".to_json()),
(
String::from("min-llvm-version-for-inline"),
min_llvm_version_for_inline.to_json(),
Json::Array(vec![maj.to_json(), min.to_json(), patch.to_json()]),
),
]
.into_iter()
@@ -1682,7 +1683,7 @@ pub fn max_atomic_width(&self) -> u64 {
}
/// Loads a target descriptor from a JSON object.
pub fn from_json(mut obj: Json) -> Result<(Target, TargetWarnings), String> {
pub fn from_json(obj: Json) -> Result<(Target, TargetWarnings), String> {
// While ugly, this code must remain this way to retain
// compatibility with existing JSON fields and the internal
// expected naming of the Target and TargetOptions structs.
@@ -1690,9 +1691,14 @@ pub fn from_json(mut obj: Json) -> Result<(Target, TargetWarnings), String> {
// are round-tripped through this code to catch cases where
// the JSON parser is not updated to match the structs.
let mut obj = match obj {
Value::Object(obj) => obj,
_ => return Err("Expected JSON object for target")?,
};
let mut get_req_field = |name: &str| {
obj.remove_key(name)
.and_then(|j| Json::as_string(&j).map(str::to_string))
obj.remove(name)
.and_then(|j| j.as_str().map(str::to_string))
.ok_or_else(|| format!("Field {} in target specification is required", name))
};
@@ -1711,31 +1717,31 @@ pub fn from_json(mut obj: Json) -> Result<(Target, TargetWarnings), String> {
macro_rules! key {
($key_name:ident) => ( {
let name = (stringify!($key_name)).replace("_", "-");
if let Some(s) = obj.remove_key(&name).and_then(|j| Json::as_string(&j).map(str::to_string).map(Cow::from)) {
if let Some(s) = obj.remove(&name).and_then(|s| s.as_str().map(str::to_string).map(Cow::from)) {
base.$key_name = s;
}
} );
($key_name:ident = $json_name:expr) => ( {
let name = $json_name;
if let Some(s) = obj.remove_key(&name).and_then(|j| Json::as_string(&j).map(str::to_string).map(Cow::from)) {
if let Some(s) = obj.remove(name).and_then(|s| s.as_str().map(str::to_string).map(Cow::from)) {
base.$key_name = s;
}
} );
($key_name:ident, bool) => ( {
let name = (stringify!($key_name)).replace("_", "-");
if let Some(s) = obj.remove_key(&name).and_then(|j| Json::as_boolean(&j)) {
if let Some(s) = obj.remove(&name).and_then(|b| b.as_bool()) {
base.$key_name = s;
}
} );
($key_name:ident, u64) => ( {
let name = (stringify!($key_name)).replace("_", "-");
if let Some(s) = obj.remove_key(&name).and_then(|j| Json::as_u64(&j)) {
if let Some(s) = obj.remove(&name).and_then(|j| Json::as_u64(&j)) {
base.$key_name = s;
}
} );
($key_name:ident, Option<u32>) => ( {
let name = (stringify!($key_name)).replace("_", "-");
if let Some(s) = obj.remove_key(&name).and_then(|j| Json::as_u64(&j)) {
if let Some(s) = obj.remove(&name).and_then(|b| b.as_u64()) {
if s < 1 || s > 5 {
return Err("Not a valid DWARF version number".into());
}
@@ -1744,13 +1750,13 @@ macro_rules! key {
} );
($key_name:ident, Option<u64>) => ( {
let name = (stringify!($key_name)).replace("_", "-");
if let Some(s) = obj.remove_key(&name).and_then(|j| Json::as_u64(&j)) {
if let Some(s) = obj.remove(&name).and_then(|b| b.as_u64()) {
base.$key_name = Some(s);
}
} );
($key_name:ident, MergeFunctions) => ( {
let name = (stringify!($key_name)).replace("_", "-");
obj.remove_key(&name[..]).and_then(|o| o.as_string().and_then(|s| {
obj.remove(&name).and_then(|o| o.as_str().and_then(|s| {
match s.parse::<MergeFunctions>() {
Ok(mergefunc) => base.$key_name = mergefunc,
_ => return Some(Err(format!("'{}' is not a valid value for \
@@ -1763,7 +1769,7 @@ macro_rules! key {
} );
($key_name:ident, RelocModel) => ( {
let name = (stringify!($key_name)).replace("_", "-");
obj.remove_key(&name[..]).and_then(|o| o.as_string().and_then(|s| {
obj.remove(&name).and_then(|o| o.as_str().and_then(|s| {
match s.parse::<RelocModel>() {
Ok(relocation_model) => base.$key_name = relocation_model,
_ => return Some(Err(format!("'{}' is not a valid relocation model. \
@@ -1775,7 +1781,7 @@ macro_rules! key {
} );
($key_name:ident, CodeModel) => ( {
let name = (stringify!($key_name)).replace("_", "-");
obj.remove_key(&name[..]).and_then(|o| o.as_string().and_then(|s| {
obj.remove(&name).and_then(|o| o.as_str().and_then(|s| {
match s.parse::<CodeModel>() {
Ok(code_model) => base.$key_name = Some(code_model),
_ => return Some(Err(format!("'{}' is not a valid code model. \
@@ -1787,7 +1793,7 @@ macro_rules! key {
} );
($key_name:ident, TlsModel) => ( {
let name = (stringify!($key_name)).replace("_", "-");
obj.remove_key(&name[..]).and_then(|o| o.as_string().and_then(|s| {
obj.remove(&name).and_then(|o| o.as_str().and_then(|s| {
match s.parse::<TlsModel>() {
Ok(tls_model) => base.$key_name = tls_model,
_ => return Some(Err(format!("'{}' is not a valid TLS model. \
@@ -1799,7 +1805,7 @@ macro_rules! key {
} );
($key_name:ident, PanicStrategy) => ( {
let name = (stringify!($key_name)).replace("_", "-");
obj.remove_key(&name[..]).and_then(|o| o.as_string().and_then(|s| {
obj.remove(&name).and_then(|o| o.as_str().and_then(|s| {
match s {
"unwind" => base.$key_name = PanicStrategy::Unwind,
"abort" => base.$key_name = PanicStrategy::Abort,
@@ -1812,7 +1818,7 @@ macro_rules! key {
} );
($key_name:ident, RelroLevel) => ( {
let name = (stringify!($key_name)).replace("_", "-");
obj.remove_key(&name[..]).and_then(|o| o.as_string().and_then(|s| {
obj.remove(&name).and_then(|o| o.as_str().and_then(|s| {
match s.parse::<RelroLevel>() {
Ok(level) => base.$key_name = level,
_ => return Some(Err(format!("'{}' is not a valid value for \
@@ -1824,7 +1830,7 @@ macro_rules! key {
} );
($key_name:ident, SplitDebuginfo) => ( {
let name = (stringify!($key_name)).replace("_", "-");
obj.remove_key(&name[..]).and_then(|o| o.as_string().and_then(|s| {
obj.remove(&name).and_then(|o| o.as_str().and_then(|s| {
match s.parse::<SplitDebuginfo>() {
Ok(level) => base.$key_name = level,
_ => return Some(Err(format!("'{}' is not a valid value for \
@@ -1836,10 +1842,10 @@ macro_rules! key {
} );
($key_name:ident, list) => ( {
let name = (stringify!($key_name)).replace("_", "-");
if let Some(j) = obj.remove_key(&name){
if let Some(v) = Json::as_array(&j) {
if let Some(j) = obj.remove(&name) {
if let Some(v) = j.as_array() {
base.$key_name = v.iter()
.map(|a| a.as_string().unwrap().to_string().into())
.map(|a| a.as_str().unwrap().to_string().into())
.collect();
} else {
incorrect_type.push(name)
@@ -1848,10 +1854,10 @@ macro_rules! key {
} );
($key_name:ident, opt_list) => ( {
let name = (stringify!($key_name)).replace("_", "-");
if let Some(j) = obj.remove_key(&name) {
if let Some(v) = Json::as_array(&j) {
if let Some(j) = obj.remove(&name) {
if let Some(v) = j.as_array() {
base.$key_name = Some(v.iter()
.map(|a| a.as_string().unwrap().to_string().into())
.map(|a| a.as_str().unwrap().to_string().into())
.collect());
} else {
incorrect_type.push(name)
@@ -1860,15 +1866,15 @@ macro_rules! key {
} );
($key_name:ident, optional) => ( {
let name = (stringify!($key_name)).replace("_", "-");
if let Some(o) = obj.remove_key(&name[..]) {
if let Some(o) = obj.remove(&name) {
base.$key_name = o
.as_string()
.as_str()
.map(|s| s.to_string().into());
}
} );
($key_name:ident, LldFlavor) => ( {
let name = (stringify!($key_name)).replace("_", "-");
obj.remove_key(&name[..]).and_then(|o| o.as_string().and_then(|s| {
obj.remove(&name).and_then(|o| o.as_str().and_then(|s| {
if let Some(flavor) = LldFlavor::from_str(&s) {
base.$key_name = flavor;
} else {
@@ -1882,7 +1888,7 @@ macro_rules! key {
} );
($key_name:ident, LinkerFlavor) => ( {
let name = (stringify!($key_name)).replace("_", "-");
obj.remove_key(&name[..]).and_then(|o| o.as_string().and_then(|s| {
obj.remove(&name).and_then(|o| o.as_str().and_then(|s| {
match LinkerFlavor::from_str(s) {
Some(linker_flavor) => base.$key_name = linker_flavor,
_ => return Some(Err(format!("'{}' is not a valid value for linker-flavor. \
@@ -1893,7 +1899,7 @@ macro_rules! key {
} );
($key_name:ident, StackProbeType) => ( {
let name = (stringify!($key_name)).replace("_", "-");
obj.remove_key(&name[..]).and_then(|o| match StackProbeType::from_json(&o) {
obj.remove(&name).and_then(|o| match StackProbeType::from_json(&o) {
Ok(v) => {
base.$key_name = v;
Some(Ok(()))
@@ -1905,10 +1911,10 @@ macro_rules! key {
} );
($key_name:ident, SanitizerSet) => ( {
let name = (stringify!($key_name)).replace("_", "-");
if let Some(o) = obj.remove_key(&name[..]) {
if let Some(o) = obj.remove(&name) {
if let Some(a) = o.as_array() {
for s in a {
base.$key_name |= match s.as_string() {
base.$key_name |= match s.as_str() {
Some("address") => SanitizerSet::ADDRESS,
Some("cfi") => SanitizerSet::CFI,
Some("leak") => SanitizerSet::LEAK,
@@ -1929,7 +1935,7 @@ macro_rules! key {
($key_name:ident, crt_objects_fallback) => ( {
let name = (stringify!($key_name)).replace("_", "-");
obj.remove_key(&name[..]).and_then(|o| o.as_string().and_then(|s| {
obj.remove(&name).and_then(|o| o.as_str().and_then(|s| {
match s.parse::<CrtObjectsFallback>() {
Ok(fallback) => base.$key_name = Some(fallback),
_ => return Some(Err(format!("'{}' is not a valid CRT objects fallback. \
@@ -1940,7 +1946,7 @@ macro_rules! key {
} );
($key_name:ident, link_objects) => ( {
let name = (stringify!($key_name)).replace("_", "-");
if let Some(val) = obj.remove_key(&name[..]) {
if let Some(val) = obj.remove(&name) {
let obj = val.as_object().ok_or_else(|| format!("{}: expected a \
JSON object with fields per CRT object kind.", name))?;
let mut args = CrtObjects::new();
@@ -1955,7 +1961,7 @@ macro_rules! key {
format!("{}.{}: expected a JSON array", name, k)
)?.iter().enumerate()
.map(|(i,s)| {
let s = s.as_string().ok_or_else(||
let s = s.as_str().ok_or_else(||
format!("{}.{}[{}]: expected a JSON string", name, k, i))?;
Ok(s.to_string().into())
})
@@ -1968,7 +1974,7 @@ macro_rules! key {
} );
($key_name:ident, link_args) => ( {
let name = (stringify!($key_name)).replace("_", "-");
if let Some(val) = obj.remove_key(&name[..]) {
if let Some(val) = obj.remove(&name) {
let obj = val.as_object().ok_or_else(|| format!("{}: expected a \
JSON object with fields per linker-flavor.", name))?;
let mut args = LinkArgs::new();
@@ -1982,7 +1988,7 @@ macro_rules! key {
format!("{}.{}: expected a JSON array", name, k)
)?.iter().enumerate()
.map(|(i,s)| {
let s = s.as_string().ok_or_else(||
let s = s.as_str().ok_or_else(||
format!("{}.{}[{}]: expected a JSON string", name, k, i))?;
Ok(s.to_string().into())
})
@@ -1995,10 +2001,10 @@ macro_rules! key {
} );
($key_name:ident, env) => ( {
let name = (stringify!($key_name)).replace("_", "-");
if let Some(o) = obj.remove_key(&name[..]) {
if let Some(o) = obj.remove(&name) {
if let Some(a) = o.as_array() {
for o in a {
if let Some(s) = o.as_string() {
if let Some(s) = o.as_str() {
let p = s.split('=').collect::<Vec<_>>();
if p.len() == 2 {
let k = p[0].to_string();
@@ -2014,7 +2020,7 @@ macro_rules! key {
} );
($key_name:ident, Option<Abi>) => ( {
let name = (stringify!($key_name)).replace("_", "-");
obj.remove_key(&name[..]).and_then(|o| o.as_string().and_then(|s| {
obj.remove(&name).and_then(|o| o.as_str().and_then(|s| {
match lookup_abi(s) {
Some(abi) => base.$key_name = Some(abi),
_ => return Some(Err(format!("'{}' is not a valid value for abi", s))),
@@ -2023,28 +2029,28 @@ macro_rules! key {
})).unwrap_or(Ok(()))
} );
($key_name:ident, TargetFamilies) => ( {
if let Some(value) = obj.remove_key("target-family") {
if let Some(v) = Json::as_array(&value) {
if let Some(value) = obj.remove("target-family") {
if let Some(v) = value.as_array() {
base.$key_name = v.iter()
.map(|a| a.as_string().unwrap().to_string().into())
.map(|a| a.as_str().unwrap().to_string().into())
.collect();
} else if let Some(v) = Json::as_string(&value) {
} else if let Some(v) = value.as_str() {
base.$key_name = vec![v.to_string().into()].into();
}
}
} );
}
if let Some(j) = obj.remove_key("target-endian") {
if let Some(s) = Json::as_string(&j) {
if let Some(j) = obj.remove("target-endian") {
if let Some(s) = j.as_str() {
base.endian = s.parse()?;
} else {
incorrect_type.push("target-endian".into())
}
}
if let Some(fp) = obj.remove_key("frame-pointer") {
if let Some(s) = Json::as_string(&fp) {
if let Some(fp) = obj.remove("frame-pointer") {
if let Some(s) = fp.as_str() {
base.frame_pointer = s
.parse()
.map_err(|()| format!("'{}' is not a valid value for frame-pointer", s))?;
@@ -2156,8 +2162,8 @@ macro_rules! key {
// This can cause unfortunate ICEs later down the line.
return Err("may not set is_builtin for targets not built-in".into());
}
// Each field should have been read using `Json::remove_key` so any keys remaining are unused.
let remaining_keys = obj.as_object().ok_or("Expected JSON object for target")?.keys();
// Each field should have been read using `Json::remove` so any keys remaining are unused.
let remaining_keys = obj.keys();
Ok((
base,
TargetWarnings { unused_fields: remaining_keys.cloned().collect(), incorrect_type },
@@ -2189,13 +2195,12 @@ pub fn search(
target_triple: &TargetTriple,
sysroot: &Path,
) -> Result<(Target, TargetWarnings), String> {
use rustc_serialize::json;
use std::env;
use std::fs;
fn load_file(path: &Path) -> Result<(Target, TargetWarnings), String> {
let contents = fs::read_to_string(path).map_err(|e| e.to_string())?;
let obj = json::from_str(&contents).map_err(|e| e.to_string())?;
let obj = serde_json::from_str(&contents).map_err(|e| e.to_string())?;
Target::from_json(obj)
}
@@ -2248,7 +2253,7 @@ fn load_file(path: &Path) -> Result<(Target, TargetWarnings), String> {
impl ToJson for Target {
fn to_json(&self) -> Json {
let mut d = BTreeMap::new();
let mut d = serde_json::Map::new();
let default: TargetOptions = Default::default();
macro_rules! target_val {
+3 -5
View File
@@ -1,10 +1,8 @@
use crate::spec::Target;
use rustc_serialize::json::Json;
use std::str::FromStr;
#[test]
fn report_unused_fields() {
let json = Json::from_str(
let json = serde_json::from_str(
r#"
{
"arch": "powerpc64",
@@ -23,7 +21,7 @@ fn report_unused_fields() {
#[test]
fn report_incorrect_json_type() {
let json = Json::from_str(
let json = serde_json::from_str(
r#"
{
"arch": "powerpc64",
@@ -42,7 +40,7 @@ fn report_incorrect_json_type() {
#[test]
fn no_warnings_for_valid_target() {
let json = Json::from_str(
let json = serde_json::from_str(
r#"
{
"arch": "powerpc64",
+110 -112
View File
@@ -716,118 +716,116 @@ impl<I: Interner, E: TyEncoder> Encodable<E> for TyKind<I>
I::AllocId: Encodable<E>,
{
fn encode(&self, e: &mut E) -> Result<(), <E as rustc_serialize::Encoder>::Error> {
rustc_serialize::Encoder::emit_enum(e, |e| {
let disc = discriminant(self);
match self {
Bool => e.emit_enum_variant("Bool", disc, 0, |_| Ok(())),
Char => e.emit_enum_variant("Char", disc, 0, |_| Ok(())),
Int(i) => e.emit_enum_variant("Int", disc, 1, |e| {
e.emit_enum_variant_arg(true, |e| i.encode(e))?;
Ok(())
}),
Uint(u) => e.emit_enum_variant("Uint", disc, 1, |e| {
e.emit_enum_variant_arg(true, |e| u.encode(e))?;
Ok(())
}),
Float(f) => e.emit_enum_variant("Float", disc, 1, |e| {
e.emit_enum_variant_arg(true, |e| f.encode(e))?;
Ok(())
}),
Adt(adt, substs) => e.emit_enum_variant("Adt", disc, 2, |e| {
e.emit_enum_variant_arg(true, |e| adt.encode(e))?;
e.emit_enum_variant_arg(false, |e| substs.encode(e))?;
Ok(())
}),
Foreign(def_id) => e.emit_enum_variant("Foreign", disc, 1, |e| {
e.emit_enum_variant_arg(true, |e| def_id.encode(e))?;
Ok(())
}),
Str => e.emit_enum_variant("Str", disc, 0, |_| Ok(())),
Array(t, c) => e.emit_enum_variant("Array", disc, 2, |e| {
e.emit_enum_variant_arg(true, |e| t.encode(e))?;
e.emit_enum_variant_arg(false, |e| c.encode(e))?;
Ok(())
}),
Slice(t) => e.emit_enum_variant("Slice", disc, 1, |e| {
e.emit_enum_variant_arg(true, |e| t.encode(e))?;
Ok(())
}),
RawPtr(tam) => e.emit_enum_variant("RawPtr", disc, 1, |e| {
e.emit_enum_variant_arg(true, |e| tam.encode(e))?;
Ok(())
}),
Ref(r, t, m) => e.emit_enum_variant("Ref", disc, 3, |e| {
e.emit_enum_variant_arg(true, |e| r.encode(e))?;
e.emit_enum_variant_arg(false, |e| t.encode(e))?;
e.emit_enum_variant_arg(false, |e| m.encode(e))?;
Ok(())
}),
FnDef(def_id, substs) => e.emit_enum_variant("FnDef", disc, 2, |e| {
e.emit_enum_variant_arg(true, |e| def_id.encode(e))?;
e.emit_enum_variant_arg(false, |e| substs.encode(e))?;
Ok(())
}),
FnPtr(polyfnsig) => e.emit_enum_variant("FnPtr", disc, 1, |e| {
e.emit_enum_variant_arg(true, |e| polyfnsig.encode(e))?;
Ok(())
}),
Dynamic(l, r) => e.emit_enum_variant("Dynamic", disc, 2, |e| {
e.emit_enum_variant_arg(true, |e| l.encode(e))?;
e.emit_enum_variant_arg(false, |e| r.encode(e))?;
Ok(())
}),
Closure(def_id, substs) => e.emit_enum_variant("Closure", disc, 2, |e| {
e.emit_enum_variant_arg(true, |e| def_id.encode(e))?;
e.emit_enum_variant_arg(false, |e| substs.encode(e))?;
Ok(())
}),
Generator(def_id, substs, m) => e.emit_enum_variant("Generator", disc, 3, |e| {
e.emit_enum_variant_arg(true, |e| def_id.encode(e))?;
e.emit_enum_variant_arg(false, |e| substs.encode(e))?;
e.emit_enum_variant_arg(false, |e| m.encode(e))?;
Ok(())
}),
GeneratorWitness(b) => e.emit_enum_variant("GeneratorWitness", disc, 1, |e| {
e.emit_enum_variant_arg(true, |e| b.encode(e))?;
Ok(())
}),
Never => e.emit_enum_variant("Never", disc, 0, |_| Ok(())),
Tuple(substs) => e.emit_enum_variant("Tuple", disc, 1, |e| {
e.emit_enum_variant_arg(true, |e| substs.encode(e))?;
Ok(())
}),
Projection(p) => e.emit_enum_variant("Projection", disc, 1, |e| {
e.emit_enum_variant_arg(true, |e| p.encode(e))?;
Ok(())
}),
Opaque(def_id, substs) => e.emit_enum_variant("Opaque", disc, 2, |e| {
e.emit_enum_variant_arg(true, |e| def_id.encode(e))?;
e.emit_enum_variant_arg(false, |e| substs.encode(e))?;
Ok(())
}),
Param(p) => e.emit_enum_variant("Param", disc, 1, |e| {
e.emit_enum_variant_arg(true, |e| p.encode(e))?;
Ok(())
}),
Bound(d, b) => e.emit_enum_variant("Bound", disc, 2, |e| {
e.emit_enum_variant_arg(true, |e| d.encode(e))?;
e.emit_enum_variant_arg(false, |e| b.encode(e))?;
Ok(())
}),
Placeholder(p) => e.emit_enum_variant("Placeholder", disc, 1, |e| {
e.emit_enum_variant_arg(true, |e| p.encode(e))?;
Ok(())
}),
Infer(i) => e.emit_enum_variant("Infer", disc, 1, |e| {
e.emit_enum_variant_arg(true, |e| i.encode(e))?;
Ok(())
}),
Error(d) => e.emit_enum_variant("Error", disc, 1, |e| {
e.emit_enum_variant_arg(true, |e| d.encode(e))?;
Ok(())
}),
}
})
let disc = discriminant(self);
match self {
Bool => e.emit_enum_variant(disc, |_| Ok(())),
Char => e.emit_enum_variant(disc, |_| Ok(())),
Int(i) => e.emit_enum_variant(disc, |e| {
i.encode(e)?;
Ok(())
}),
Uint(u) => e.emit_enum_variant(disc, |e| {
u.encode(e)?;
Ok(())
}),
Float(f) => e.emit_enum_variant(disc, |e| {
f.encode(e)?;
Ok(())
}),
Adt(adt, substs) => e.emit_enum_variant(disc, |e| {
adt.encode(e)?;
substs.encode(e)?;
Ok(())
}),
Foreign(def_id) => e.emit_enum_variant(disc, |e| {
def_id.encode(e)?;
Ok(())
}),
Str => e.emit_enum_variant(disc, |_| Ok(())),
Array(t, c) => e.emit_enum_variant(disc, |e| {
t.encode(e)?;
c.encode(e)?;
Ok(())
}),
Slice(t) => e.emit_enum_variant(disc, |e| {
t.encode(e)?;
Ok(())
}),
RawPtr(tam) => e.emit_enum_variant(disc, |e| {
tam.encode(e)?;
Ok(())
}),
Ref(r, t, m) => e.emit_enum_variant(disc, |e| {
r.encode(e)?;
t.encode(e)?;
m.encode(e)?;
Ok(())
}),
FnDef(def_id, substs) => e.emit_enum_variant(disc, |e| {
def_id.encode(e)?;
substs.encode(e)?;
Ok(())
}),
FnPtr(polyfnsig) => e.emit_enum_variant(disc, |e| {
polyfnsig.encode(e)?;
Ok(())
}),
Dynamic(l, r) => e.emit_enum_variant(disc, |e| {
l.encode(e)?;
r.encode(e)?;
Ok(())
}),
Closure(def_id, substs) => e.emit_enum_variant(disc, |e| {
def_id.encode(e)?;
substs.encode(e)?;
Ok(())
}),
Generator(def_id, substs, m) => e.emit_enum_variant(disc, |e| {
def_id.encode(e)?;
substs.encode(e)?;
m.encode(e)?;
Ok(())
}),
GeneratorWitness(b) => e.emit_enum_variant(disc, |e| {
b.encode(e)?;
Ok(())
}),
Never => e.emit_enum_variant(disc, |_| Ok(())),
Tuple(substs) => e.emit_enum_variant(disc, |e| {
substs.encode(e)?;
Ok(())
}),
Projection(p) => e.emit_enum_variant(disc, |e| {
p.encode(e)?;
Ok(())
}),
Opaque(def_id, substs) => e.emit_enum_variant(disc, |e| {
def_id.encode(e)?;
substs.encode(e)?;
Ok(())
}),
Param(p) => e.emit_enum_variant(disc, |e| {
p.encode(e)?;
Ok(())
}),
Bound(d, b) => e.emit_enum_variant(disc, |e| {
d.encode(e)?;
b.encode(e)?;
Ok(())
}),
Placeholder(p) => e.emit_enum_variant(disc, |e| {
p.encode(e)?;
Ok(())
}),
Infer(i) => e.emit_enum_variant(disc, |e| {
i.encode(e)?;
Ok(())
}),
Error(d) => e.emit_enum_variant(disc, |e| {
d.encode(e)?;
Ok(())
}),
}
}
}
+2 -2
View File
@@ -3,8 +3,8 @@
#![allow(unused_imports)]
#![feature(rustc_private)]
extern crate rustc_serialize;
use rustc_serialize::json::Object;
extern crate libc;
use libc::c_void;
pub fn main() {
println!("Hello world!");
+52 -14
View File
@@ -3,42 +3,80 @@
#![allow(unused_must_use)]
#![allow(dead_code)]
#![allow(unused_imports)]
#![feature(rustc_private)]
extern crate rustc_macros;
extern crate rustc_serialize;
use std::fmt;
use std::io::prelude::*;
use std::io::Cursor;
use std::slice;
use std::marker::PhantomData;
trait Encoder {
type Error;
}
trait Encodable<S: Encoder> {
fn encode(&self, s: &mut S) -> Result<(), S::Error>;
}
struct JsonEncoder<'a>(PhantomData<&'a mut ()>);
impl Encoder for JsonEncoder<'_> {
type Error = ();
}
struct AsJson<'a, T> {
inner: &'a T,
}
impl<'a, T: for<'r> Encodable<JsonEncoder<'r>>> fmt::Display for AsJson<'a, T> {
/// Encodes a json value into a string
fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
Ok(())
}
}
fn as_json<T>(t: &T) -> AsJson<'_, T> {
AsJson { inner: t }
}
struct OpaqueEncoder(Vec<u8>);
impl Encoder for OpaqueEncoder {
type Error = ();
}
use rustc_macros::Encodable;
use rustc_serialize::json;
use rustc_serialize::opaque;
use rustc_serialize::{Encodable, Encoder};
#[derive(Encodable)]
struct Foo {
baz: bool,
}
#[derive(Encodable)]
impl<S: Encoder> Encodable<S> for Foo {
fn encode(&self, _s: &mut S) -> Result<(), S::Error> {
Ok(())
}
}
struct Bar {
froboz: usize,
}
impl<S: Encoder> Encodable<S> for Bar {
fn encode(&self, _s: &mut S) -> Result<(), S::Error> {
Ok(())
}
}
enum WireProtocol {
JSON,
Opaque,
// ...
}
fn encode_json<T: for<'a> Encodable<json::Encoder<'a>>>(val: &T, wr: &mut Cursor<Vec<u8>>) {
write!(wr, "{}", json::as_json(val));
fn encode_json<T: for<'a> Encodable<JsonEncoder<'a>>>(val: &T, wr: &mut Cursor<Vec<u8>>) {
write!(wr, "{}", as_json(val));
}
fn encode_opaque<T: Encodable<opaque::Encoder>>(val: &T, wr: Vec<u8>) {
let mut encoder = opaque::Encoder::new(wr);
fn encode_opaque<T: Encodable<OpaqueEncoder>>(val: &T, wr: Vec<u8>) {
let mut encoder = OpaqueEncoder(wr);
val.encode(&mut encoder);
}
+35 -8
View File
@@ -3,21 +3,48 @@
#![allow(unused_imports)]
#![allow(unused_must_use)]
// pretty-expanded FIXME #23616
#![feature(rustc_private)]
extern crate rustc_serialize;
use rustc_serialize::json;
use rustc_serialize::{Encodable, Encoder};
use std::fmt;
use std::marker::PhantomData;
struct Foo<T: for<'a> Encodable<json::Encoder<'a>>> {
trait Encoder {
type Error;
}
trait Encodable<S: Encoder> {
fn encode(&self, s: &mut S) -> Result<(), S::Error>;
}
impl<S: Encoder> Encodable<S> for i32 {
fn encode(&self, _s: &mut S) -> Result<(), S::Error> {
Ok(())
}
}
struct JsonEncoder<'a>(PhantomData<&'a mut ()>);
impl Encoder for JsonEncoder<'_> {
type Error = ();
}
fn encode_json<T: for<'r> Encodable<JsonEncoder<'r>>>(
object: &T,
) -> Result<String, ()> {
let s = String::new();
{
let mut encoder = JsonEncoder(PhantomData);
object.encode(&mut encoder)?;
}
Ok(s)
}
struct Foo<T: for<'a> Encodable<JsonEncoder<'a>>> {
v: T,
}
impl<T: for<'a> Encodable<json::Encoder<'a>>> Drop for Foo<T> {
impl<T: for<'a> Encodable<JsonEncoder<'a>>> Drop for Foo<T> {
fn drop(&mut self) {
json::encode(&self.v);
encode_json(&self.v);
}
}
+22 -11
View File
@@ -2,27 +2,38 @@
#![allow(non_camel_case_types)]
#![allow(dead_code)]
#![feature(rustc_private)]
extern crate rustc_serialize;
use std::collections::HashMap;
use rustc_serialize::json::{self, Json};
use std::collections::{BTreeMap, HashMap};
use std::option;
#[derive(Clone, Debug)]
enum Json {
I64(i64),
U64(u64),
F64(f64),
String(String),
Boolean(bool),
Array(Array),
Object(Object),
Null,
}
type Array = Vec<Json>;
type Object = BTreeMap<String, Json>;
enum object {
bool_value(bool),
int_value(i64),
}
fn lookup(table: json::Object, key: String, default: String) -> String
fn lookup(table: Object, key: String, default: String) -> String
{
match table.get(&key) {
option::Option::Some(&Json::String(ref s)) => {
s.to_string()
}
option::Option::Some(value) => {
println!("{} was expected to be a string but is a {}", key, value);
println!("{} was expected to be a string but is a {:?}", key, value);
default
}
option::Option::None => {
@@ -31,7 +42,7 @@ fn lookup(table: json::Object, key: String, default: String) -> String
}
}
fn add_interface(_store: isize, managed_ip: String, data: json::Json) -> (String, object)
fn add_interface(_store: isize, managed_ip: String, data: Json) -> (String, object)
{
match &data {
&Json::Object(ref interface) => {
@@ -43,13 +54,13 @@ fn add_interface(_store: isize, managed_ip: String, data: json::Json) -> (String
(label, object::bool_value(false))
}
_ => {
println!("Expected dict for {} interfaces, found {}", managed_ip, data);
println!("Expected dict for {} interfaces, found {:?}", managed_ip, data);
("gnos:missing-interface".to_string(), object::bool_value(true))
}
}
}
fn add_interfaces(store: isize, managed_ip: String, device: HashMap<String, json::Json>)
fn add_interfaces(store: isize, managed_ip: String, device: HashMap<String, Json>)
-> Vec<(String, object)> {
match device["interfaces"] {
Json::Array(ref interfaces) =>
@@ -60,7 +71,7 @@ fn add_interfaces(store: isize, managed_ip: String, device: HashMap<String, json
}
_ =>
{
println!("Expected list for {} interfaces, found {}", managed_ip,
println!("Expected list for {} interfaces, found {:?}", managed_ip,
device["interfaces"]);
Vec::new()
}
-56
View File
@@ -1,56 +0,0 @@
// Test that AST json serialization doesn't ICE (#63728).
// revisions: expand noexpand
//[expand] compile-flags: -Zast-json
//[noexpand] compile-flags: -Zast-json-noexpand
// check-pass
// dont-check-compiler-stdout - don't check for any AST change.
enum V {
A(i32),
B { f: [i64; 3 + 4] }
}
trait X {
type Output;
fn read(&self) -> Self::Output;
fn write(&mut self, _: Self::Output);
}
macro_rules! call_println {
($y:ident) => { println!("{}", $y) }
}
fn main() {
let x: (i32) = 35;
let y = x as i64<> + 5;
call_println!(y);
struct A;
}
// Regressions tests for issues #78398 and #78510 (captured tokens in associated and foreign items)
struct S;
macro_rules! mac_extern {
($i:item) => {
extern "C" { $i }
}
}
macro_rules! mac_assoc {
($i:item) => {
impl S { $i }
trait Bar { $i }
}
}
mac_extern! {
fn foo();
}
mac_assoc! {
fn foo() {}
}
@@ -1,10 +0,0 @@
// Check that AST json printing works.
#![crate_type = "lib"]
// check-pass
// compile-flags: -Zast-json-noexpand
// normalize-stdout-test ":\d+" -> ":0"
// Only include a single item to reduce how often the test output needs
// updating.
extern crate core;
@@ -1 +0,0 @@
{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"crate_type","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"args":{"variant":"Eq","fields":[{"lo":0,"hi":0},{"variant":"Ast","fields":[{"id":0,"kind":{"variant":"Lit","fields":[{"token":{"kind":"Str","symbol":"lib","suffix":null},"kind":{"variant":"Str","fields":["lib","Cooked"]},"span":{"lo":0,"hi":0}}]},"span":{"lo":0,"hi":0},"attrs":{"0":null},"tokens":{"0":[[{"variant":"Token","fields":[{"kind":{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"Alone"]]}}]}]},"tokens":null},{"0":[[{"variant":"Token","fields":[{"kind":"Pound","span":{"lo":0,"hi":0}}]},"Joint"],[{"variant":"Token","fields":[{"kind":"Not","span":{"lo":0,"hi":0}}]},"Alone"],[{"variant":"Delimited","fields":[{"open":{"lo":0,"hi":0},"close":{"lo":0,"hi":0}},"Bracket",{"0":[[{"variant":"Token","fields":[{"kind":{"variant":"Ident","fields":["crate_type",false]},"span":{"lo":0,"hi":0}}]},"Alone"],[{"variant":"Token","fields":[{"kind":"Eq","span":{"lo":0,"hi":0}}]},"Alone"],[{"variant":"Token","fields":[{"kind":{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"Alone"]]}]},"Alone"]]}]},"id":null,"style":"Inner","span":{"lo":0,"hi":0}}],"items":[{"attrs":[],"id":0,"span":{"lo":0,"hi":0},"vis":{"kind":"Inherited","span":{"lo":0,"hi":0},"tokens":null},"ident":{"name":"core","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null}],"spans":{"inner_span":{"lo":0,"hi":0},"inject_use_span":{"lo":0,"hi":0}},"id":0,"is_placeholder":false}
-10
View File
@@ -1,10 +0,0 @@
// Check that AST json printing works.
#![crate_type = "lib"]
// check-pass
// compile-flags: -Zast-json
// normalize-stdout-test ":\d+" -> ":0"
// Only include a single item to reduce how often the test output needs
// updating.
extern crate core;
@@ -1 +0,0 @@
{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"crate_type","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"args":{"variant":"Eq","fields":[{"lo":0,"hi":0},{"variant":"Ast","fields":[{"id":0,"kind":{"variant":"Lit","fields":[{"token":{"kind":"Str","symbol":"lib","suffix":null},"kind":{"variant":"Str","fields":["lib","Cooked"]},"span":{"lo":0,"hi":0}}]},"span":{"lo":0,"hi":0},"attrs":{"0":null},"tokens":{"0":[[{"variant":"Token","fields":[{"kind":{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"Alone"]]}}]}]},"tokens":null},{"0":[[{"variant":"Token","fields":[{"kind":"Pound","span":{"lo":0,"hi":0}}]},"Joint"],[{"variant":"Token","fields":[{"kind":"Not","span":{"lo":0,"hi":0}}]},"Alone"],[{"variant":"Delimited","fields":[{"open":{"lo":0,"hi":0},"close":{"lo":0,"hi":0}},"Bracket",{"0":[[{"variant":"Token","fields":[{"kind":{"variant":"Ident","fields":["crate_type",false]},"span":{"lo":0,"hi":0}}]},"Alone"],[{"variant":"Token","fields":[{"kind":"Eq","span":{"lo":0,"hi":0}}]},"Alone"],[{"variant":"Token","fields":[{"kind":{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"Alone"]]}]},"Alone"]]}]},"id":null,"style":"Inner","span":{"lo":0,"hi":0}}],"items":[{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"prelude_import","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"args":"Empty","tokens":null},null]},"id":null,"style":"Outer","span":{"lo":0,"hi":0}}],"id":0,"span":{"lo":0,"hi":0},"vis":{"kind":"Inherited","span":{"lo":0,"hi":0},"tokens":null},"ident":{"name":"","span":{"lo":0,"hi":0}},"kind":{"variant":"Use","fields":[{"prefix":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"{{root}}","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"std","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"
prelude","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"rust_2015","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"kind":"Glob","span":{"lo":0,"hi":0}}]},"tokens":null},{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"macro_use","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"args":"Empty","tokens":null},null]},"id":null,"style":"Outer","span":{"lo":0,"hi":0}}],"id":0,"span":{"lo":0,"hi":0},"vis":{"kind":"Inherited","span":{"lo":0,"hi":0},"tokens":null},"ident":{"name":"std","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null},{"attrs":[],"id":0,"span":{"lo":0,"hi":0},"vis":{"kind":"Inherited","span":{"lo":0,"hi":0},"tokens":null},"ident":{"name":"core","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null}],"spans":{"inner_span":{"lo":0,"hi":0},"inject_use_span":{"lo":0,"hi":0}},"id":0,"is_placeholder":false}