From ac795c63958f24407641b9c43bed160e8a24f67c Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Thu, 4 Jun 2015 19:48:35 -0700 Subject: [PATCH 1/6] Remove a number of libraries to be imported as subtrees --- src/libgetopts/lib.rs | 1612 --------- src/libserialize/collection_impls.rs | 253 -- src/libserialize/hex.rs | 226 -- src/libserialize/json.rs | 4024 ----------------------- src/libserialize/lib.rs | 58 - src/libserialize/serialize.rs | 637 ---- src/libterm/lib.rs | 256 -- src/libterm/terminfo/mod.rs | 235 -- src/libterm/terminfo/parm.rs | 703 ---- src/libterm/terminfo/parser/compiled.rs | 358 -- src/libterm/terminfo/searcher.rs | 119 - src/libterm/win.rs | 199 -- 12 files changed, 8680 deletions(-) delete mode 100644 src/libgetopts/lib.rs delete mode 100644 src/libserialize/collection_impls.rs delete mode 100644 src/libserialize/hex.rs delete mode 100644 src/libserialize/json.rs delete mode 100644 src/libserialize/lib.rs delete mode 100644 src/libserialize/serialize.rs delete mode 100644 src/libterm/lib.rs delete mode 100644 src/libterm/terminfo/mod.rs delete mode 100644 src/libterm/terminfo/parm.rs delete mode 100644 src/libterm/terminfo/parser/compiled.rs delete mode 100644 src/libterm/terminfo/searcher.rs delete mode 100644 src/libterm/win.rs diff --git a/src/libgetopts/lib.rs b/src/libgetopts/lib.rs deleted file mode 100644 index 48649a3143464..0000000000000 --- a/src/libgetopts/lib.rs +++ /dev/null @@ -1,1612 +0,0 @@ -// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! Simple getopt alternative. -//! -//! Construct a vector of options, either by using `reqopt`, `optopt`, and `optflag` -//! or by building them from components yourself, and pass them to `getopts`, -//! along with a vector of actual arguments (not including `argv[0]`). You'll -//! either get a failure code back, or a match. You'll have to verify whether -//! the amount of 'free' arguments in the match is what you expect. Use `opt_*` -//! accessors to get argument values out of the matches object. -//! -//! Single-character options are expected to appear on the command line with a -//! single preceding dash; multiple-character options are expected to be -//! proceeded by two dashes. Options that expect an argument accept their -//! argument following either a space or an equals sign. Single-character -//! options don't require the space. -//! -//! # Example -//! -//! The following example shows simple command line parsing for an application -//! that requires an input file to be specified, accepts an optional output -//! file name following `-o`, and accepts both `-h` and `--help` as optional flags. -//! -//! ```{.rust} -//! extern crate getopts; -//! use getopts::{optopt,optflag,getopts,OptGroup,usage}; -//! use std::os; -//! -//! fn do_work(inp: &str, out: Option) { -//! println!("{}", inp); -//! match out { -//! Some(x) => println!("{}", x), -//! None => println!("No Output"), -//! } -//! } -//! -//! fn print_usage(program: &str, opts: &[OptGroup]) { -//! let brief = format!("Usage: {} [options]", program); -//! print!("{}", usage(brief, opts)); -//! } -//! -//! fn main() { -//! let args: Vec = os::args(); -//! -//! let program = args[0].clone(); -//! -//! let opts = &[ -//! 
optopt("o", "", "set output file name", "NAME"), -//! optflag("h", "help", "print this help menu") -//! ]; -//! let matches = match getopts(args.tail(), opts) { -//! Ok(m) => { m } -//! Err(f) => { panic!(f.to_string()) } -//! }; -//! if matches.opt_present("h") { -//! print_usage(program, opts); -//! return; -//! } -//! let output = matches.opt_str("o"); -//! let input = if !matches.free.is_empty() { -//! matches.free[0].clone() -//! } else { -//! print_usage(program, opts); -//! return; -//! }; -//! do_work(input, output); -//! } -//! ``` - - -// Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364) -#![cfg_attr(stage0, feature(custom_attribute))] -#![crate_name = "getopts"] -#![unstable(feature = "rustc_private", - reason = "use the crates.io `getopts` library instead")] -#![staged_api] -#![crate_type = "rlib"] -#![crate_type = "dylib"] -#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "http://doc.rust-lang.org/nightly/", - html_playground_url = "http://play.rust-lang.org/")] - -#![deny(missing_docs)] -#![feature(staged_api)] -#![feature(str_char)] -#![cfg_attr(test, feature(rustc_private))] - -#[cfg(test)] #[macro_use] extern crate log; - -use self::Name::*; -use self::HasArg::*; -use self::Occur::*; -use self::Fail::*; -use self::Optval::*; -use self::SplitWithinState::*; -use self::Whitespace::*; -use self::LengthLimit::*; - -use std::fmt; -use std::iter::repeat; -use std::result; - -/// Name of an option. Either a string or a single char. -#[derive(Clone, PartialEq, Eq, Debug)] -pub enum Name { - /// A string representing the long name of an option. - /// For example: "help" - Long(String), - /// A char representing the short name of an option. - /// For example: 'h' - Short(char), -} - -/// Describes whether an option has an argument. -#[derive(Clone, Copy, PartialEq, Eq, Debug)] -pub enum HasArg { - /// The option requires an argument. - Yes, - /// The option takes no argument. - No, - /// The option argument is optional. - Maybe, -} - -/// Describes how often an option may occur. -#[derive(Clone, Copy, PartialEq, Eq, Debug)] -pub enum Occur { - /// The option occurs once. - Req, - /// The option occurs at most once. - Optional, - /// The option occurs zero or more times. - Multi, -} - -/// A description of a possible option. -#[derive(Clone, PartialEq, Eq, Debug)] -pub struct Opt { - /// Name of the option - pub name: Name, - /// Whether it has an argument - pub hasarg: HasArg, - /// How often it can occur - pub occur: Occur, - /// Which options it aliases - pub aliases: Vec, -} - -/// One group of options, e.g., both `-h` and `--help`, along with -/// their shared description and properties. -#[derive(Clone, PartialEq, Eq, Debug)] -pub struct OptGroup { - /// Short name of the option, e.g. `h` for a `-h` option - pub short_name: String, - /// Long name of the option, e.g. `help` for a `--help` option - pub long_name: String, - /// Hint for argument, e.g. `FILE` for a `-o FILE` option - pub hint: String, - /// Description for usage help text - pub desc: String, - /// Whether option has an argument - pub hasarg: HasArg, - /// How often it can occur - pub occur: Occur -} - -/// Describes whether an option is given at all or has a value. -#[derive(Clone, PartialEq, Eq, Debug)] -enum Optval { - Val(String), - Given, -} - -/// The result of checking command line arguments. Contains a vector -/// of matches and a vector of free strings. 
-#[derive(Clone, PartialEq, Eq, Debug)] -pub struct Matches { - /// Options that matched - opts: Vec, - /// Values of the Options that matched - vals: Vec>, - /// Free string fragments - pub free: Vec, -} - -/// The type returned when the command line does not conform to the -/// expected format. Use the `Debug` implementation to output detailed -/// information. -#[derive(Clone, PartialEq, Eq, Debug)] -pub enum Fail { - /// The option requires an argument but none was passed. - ArgumentMissing(String), - /// The passed option is not declared among the possible options. - UnrecognizedOption(String), - /// A required option is not present. - OptionMissing(String), - /// A single occurrence option is being used multiple times. - OptionDuplicated(String), - /// There's an argument being passed to a non-argument option. - UnexpectedArgument(String), -} - -/// The type of failure that occurred. -#[derive(Copy, Clone, PartialEq, Eq, Debug)] -#[allow(missing_docs)] -pub enum FailType { - ArgumentMissing_, - UnrecognizedOption_, - OptionMissing_, - OptionDuplicated_, - UnexpectedArgument_, -} - -/// The result of parsing a command line with a set of options. -pub type Result = result::Result; - -impl Name { - fn from_str(nm: &str) -> Name { - if nm.len() == 1 { - Short(nm.char_at(0)) - } else { - Long(nm.to_string()) - } - } - - fn to_string(&self) -> String { - match *self { - Short(ch) => ch.to_string(), - Long(ref s) => s.to_string() - } - } -} - -impl OptGroup { - /// Translate OptGroup into Opt. - /// (Both short and long names correspond to different Opts). - pub fn long_to_short(&self) -> Opt { - let OptGroup { - short_name, - long_name, - hasarg, - occur, - .. - } = (*self).clone(); - - match (short_name.len(), long_name.len()) { - (0,0) => panic!("this long-format option was given no name"), - (0,_) => Opt { - name: Long((long_name)), - hasarg: hasarg, - occur: occur, - aliases: Vec::new() - }, - (1,0) => Opt { - name: Short(short_name.char_at(0)), - hasarg: hasarg, - occur: occur, - aliases: Vec::new() - }, - (1,_) => Opt { - name: Long((long_name)), - hasarg: hasarg, - occur: occur, - aliases: vec!( - Opt { - name: Short(short_name.char_at(0)), - hasarg: hasarg, - occur: occur, - aliases: Vec::new() - } - ) - }, - (_,_) => panic!("something is wrong with the long-form opt") - } - } -} - -impl Matches { - fn opt_vals(&self, nm: &str) -> Vec { - match find_opt(&self.opts[..], Name::from_str(nm)) { - Some(id) => self.vals[id].clone(), - None => panic!("No option '{}' defined", nm) - } - } - - fn opt_val(&self, nm: &str) -> Option { - let vals = self.opt_vals(nm); - if vals.is_empty() { - None - } else { - Some(vals[0].clone()) - } - } - - /// Returns true if an option was matched. - pub fn opt_present(&self, nm: &str) -> bool { - !self.opt_vals(nm).is_empty() - } - - /// Returns the number of times an option was matched. - pub fn opt_count(&self, nm: &str) -> usize { - self.opt_vals(nm).len() - } - - /// Returns true if any of several options were matched. - pub fn opts_present(&self, names: &[String]) -> bool { - for nm in names { - match find_opt(&self.opts, Name::from_str(&**nm)) { - Some(id) if !self.vals[id].is_empty() => return true, - _ => (), - }; - } - false - } - - /// Returns the string argument supplied to one of several matching options or `None`. 
- pub fn opts_str(&self, names: &[String]) -> Option { - for nm in names { - match self.opt_val(&nm[..]) { - Some(Val(ref s)) => return Some(s.clone()), - _ => () - } - } - None - } - - /// Returns a vector of the arguments provided to all matches of the given - /// option. - /// - /// Used when an option accepts multiple values. - pub fn opt_strs(&self, nm: &str) -> Vec { - let mut acc: Vec = Vec::new(); - let r = self.opt_vals(nm); - for v in &r { - match *v { - Val(ref s) => acc.push((*s).clone()), - _ => () - } - } - acc - } - - /// Returns the string argument supplied to a matching option or `None`. - pub fn opt_str(&self, nm: &str) -> Option { - let vals = self.opt_vals(nm); - if vals.is_empty() { - return None::; - } - match vals[0] { - Val(ref s) => Some((*s).clone()), - _ => None - } - } - - - /// Returns the matching string, a default, or none. - /// - /// Returns none if the option was not present, `def` if the option was - /// present but no argument was provided, and the argument if the option was - /// present and an argument was provided. - pub fn opt_default(&self, nm: &str, def: &str) -> Option { - let vals = self.opt_vals(nm); - if vals.is_empty() { - None - } else { - match vals[0] { - Val(ref s) => Some((*s).clone()), - _ => Some(def.to_string()) - } - } - } - -} - -fn is_arg(arg: &str) -> bool { - arg.len() > 1 && arg.as_bytes()[0] == b'-' -} - -fn find_opt(opts: &[Opt], nm: Name) -> Option { - // Search main options. - let pos = opts.iter().position(|opt| opt.name == nm); - if pos.is_some() { - return pos - } - - // Search in aliases. - for candidate in opts { - if candidate.aliases.iter().position(|opt| opt.name == nm).is_some() { - return opts.iter().position(|opt| opt.name == candidate.name); - } - } - - None -} - -/// Create a long option that is required and takes an argument. -/// -/// * `short_name` - e.g. `"h"` for a `-h` option, or `""` for none -/// * `long_name` - e.g. `"help"` for a `--help` option, or `""` for none -/// * `desc` - Description for usage help -/// * `hint` - Hint that is used in place of the argument in the usage help, -/// e.g. `"FILE"` for a `-o FILE` option -pub fn reqopt(short_name: &str, long_name: &str, desc: &str, hint: &str) -> OptGroup { - let len = short_name.len(); - assert!(len == 1 || len == 0); - OptGroup { - short_name: short_name.to_string(), - long_name: long_name.to_string(), - hint: hint.to_string(), - desc: desc.to_string(), - hasarg: Yes, - occur: Req - } -} - -/// Create a long option that is optional and takes an argument. -/// -/// * `short_name` - e.g. `"h"` for a `-h` option, or `""` for none -/// * `long_name` - e.g. `"help"` for a `--help` option, or `""` for none -/// * `desc` - Description for usage help -/// * `hint` - Hint that is used in place of the argument in the usage help, -/// e.g. `"FILE"` for a `-o FILE` option -pub fn optopt(short_name: &str, long_name: &str, desc: &str, hint: &str) -> OptGroup { - let len = short_name.len(); - assert!(len == 1 || len == 0); - OptGroup { - short_name: short_name.to_string(), - long_name: long_name.to_string(), - hint: hint.to_string(), - desc: desc.to_string(), - hasarg: Yes, - occur: Optional - } -} - -/// Create a long option that is optional and does not take an argument. -/// -/// * `short_name` - e.g. `"h"` for a `-h` option, or `""` for none -/// * `long_name` - e.g. 
`"help"` for a `--help` option, or `""` for none -/// * `desc` - Description for usage help -pub fn optflag(short_name: &str, long_name: &str, desc: &str) -> OptGroup { - let len = short_name.len(); - assert!(len == 1 || len == 0); - OptGroup { - short_name: short_name.to_string(), - long_name: long_name.to_string(), - hint: "".to_string(), - desc: desc.to_string(), - hasarg: No, - occur: Optional - } -} - -/// Create a long option that can occur more than once and does not -/// take an argument. -/// -/// * `short_name` - e.g. `"h"` for a `-h` option, or `""` for none -/// * `long_name` - e.g. `"help"` for a `--help` option, or `""` for none -/// * `desc` - Description for usage help -pub fn optflagmulti(short_name: &str, long_name: &str, desc: &str) -> OptGroup { - let len = short_name.len(); - assert!(len == 1 || len == 0); - OptGroup { - short_name: short_name.to_string(), - long_name: long_name.to_string(), - hint: "".to_string(), - desc: desc.to_string(), - hasarg: No, - occur: Multi - } -} - -/// Create a long option that is optional and takes an optional argument. -/// -/// * `short_name` - e.g. `"h"` for a `-h` option, or `""` for none -/// * `long_name` - e.g. `"help"` for a `--help` option, or `""` for none -/// * `desc` - Description for usage help -/// * `hint` - Hint that is used in place of the argument in the usage help, -/// e.g. `"FILE"` for a `-o FILE` option -pub fn optflagopt(short_name: &str, long_name: &str, desc: &str, hint: &str) -> OptGroup { - let len = short_name.len(); - assert!(len == 1 || len == 0); - OptGroup { - short_name: short_name.to_string(), - long_name: long_name.to_string(), - hint: hint.to_string(), - desc: desc.to_string(), - hasarg: Maybe, - occur: Optional - } -} - -/// Create a long option that is optional, takes an argument, and may occur -/// multiple times. -/// -/// * `short_name` - e.g. `"h"` for a `-h` option, or `""` for none -/// * `long_name` - e.g. `"help"` for a `--help` option, or `""` for none -/// * `desc` - Description for usage help -/// * `hint` - Hint that is used in place of the argument in the usage help, -/// e.g. `"FILE"` for a `-o FILE` option -pub fn optmulti(short_name: &str, long_name: &str, desc: &str, hint: &str) -> OptGroup { - let len = short_name.len(); - assert!(len == 1 || len == 0); - OptGroup { - short_name: short_name.to_string(), - long_name: long_name.to_string(), - hint: hint.to_string(), - desc: desc.to_string(), - hasarg: Yes, - occur: Multi - } -} - -/// Create a generic option group, stating all parameters explicitly -pub fn opt(short_name: &str, - long_name: &str, - desc: &str, - hint: &str, - hasarg: HasArg, - occur: Occur) -> OptGroup { - let len = short_name.len(); - assert!(len == 1 || len == 0); - OptGroup { - short_name: short_name.to_string(), - long_name: long_name.to_string(), - hint: hint.to_string(), - desc: desc.to_string(), - hasarg: hasarg, - occur: occur - } -} - -impl Fail { - /// Convert a `Fail` enum into an error string. 
- #[unstable(feature = "rustc_private")] - #[deprecated(since = "1.0.0", - reason = "use `fmt::Display` (`{}` format specifier)")] - pub fn to_err_msg(self) -> String { - self.to_string() - } -} - -impl fmt::Display for Fail { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - ArgumentMissing(ref nm) => { - write!(f, "Argument to option '{}' missing.", *nm) - } - UnrecognizedOption(ref nm) => { - write!(f, "Unrecognized option: '{}'.", *nm) - } - OptionMissing(ref nm) => { - write!(f, "Required option '{}' missing.", *nm) - } - OptionDuplicated(ref nm) => { - write!(f, "Option '{}' given more than once.", *nm) - } - UnexpectedArgument(ref nm) => { - write!(f, "Option '{}' does not take an argument.", *nm) - } - } - } -} - -/// Parse command line arguments according to the provided options. -/// -/// On success returns `Ok(Matches)`. Use methods such as `opt_present` -/// `opt_str`, etc. to interrogate results. -/// # Panics -/// -/// Returns `Err(Fail)` on failure: use the `Debug` implementation of `Fail` to display -/// information about it. -pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result { - let opts: Vec = optgrps.iter().map(|x| x.long_to_short()).collect(); - let n_opts = opts.len(); - - fn f(_x: usize) -> Vec { return Vec::new(); } - - let mut vals: Vec<_> = (0..n_opts).map(f).collect(); - let mut free: Vec = Vec::new(); - let l = args.len(); - let mut i = 0; - while i < l { - let cur = args[i].clone(); - let curlen = cur.len(); - if !is_arg(&cur[..]) { - free.push(cur); - } else if cur == "--" { - let mut j = i + 1; - while j < l { free.push(args[j].clone()); j += 1; } - break; - } else { - let mut names; - let mut i_arg = None; - if cur.as_bytes()[1] == b'-' { - let tail = &cur[2..curlen]; - let tail_eq: Vec<&str> = tail.split('=').collect(); - if tail_eq.len() <= 1 { - names = vec!(Long(tail.to_string())); - } else { - names = - vec!(Long(tail_eq[0].to_string())); - i_arg = Some(tail_eq[1].to_string()); - } - } else { - let mut j = 1; - names = Vec::new(); - while j < curlen { - let ch = cur.char_at(j); - let opt = Short(ch); - - /* In a series of potential options (eg. -aheJ), if we - see one which takes an argument, we assume all - subsequent characters make up the argument. 
This - allows options such as -L/usr/local/lib/foo to be - interpreted correctly - */ - - let opt_id = match find_opt(&opts, opt.clone()) { - Some(id) => id, - None => return Err(UnrecognizedOption(opt.to_string())) - }; - - names.push(opt); - - let arg_follows = match opts[opt_id].hasarg { - Yes | Maybe => true, - No => false - }; - - let next = j + ch.len_utf8(); - if arg_follows && next < curlen { - i_arg = Some((&cur[next..curlen]).to_string()); - break; - } - - j = next; - } - } - let mut name_pos = 0; - for nm in &names { - name_pos += 1; - let optid = match find_opt(&opts, (*nm).clone()) { - Some(id) => id, - None => return Err(UnrecognizedOption(nm.to_string())) - }; - match opts[optid].hasarg { - No => { - if name_pos == names.len() && !i_arg.is_none() { - return Err(UnexpectedArgument(nm.to_string())); - } - let v = &mut vals[optid]; - v.push(Given); - } - Maybe => { - if !i_arg.is_none() { - let v = &mut vals[optid]; - v.push(Val((i_arg.clone()) - .unwrap())); - } else if name_pos < names.len() || i + 1 == l || - is_arg(&args[i + 1][..]) { - let v = &mut vals[optid]; - v.push(Given); - } else { - i += 1; - let v = &mut vals[optid]; - v.push(Val(args[i].clone())); - } - } - Yes => { - if !i_arg.is_none() { - let v = &mut vals[optid]; - v.push(Val(i_arg.clone().unwrap())); - } else if i + 1 == l { - return Err(ArgumentMissing(nm.to_string())); - } else { - i += 1; - let v = &mut vals[optid]; - v.push(Val(args[i].clone())); - } - } - } - } - } - i += 1; - } - for i in 0..n_opts { - let n = vals[i].len(); - let occ = opts[i].occur; - if occ == Req && n == 0 { - return Err(OptionMissing(opts[i].name.to_string())); - } - if occ != Multi && n > 1 { - return Err(OptionDuplicated(opts[i].name.to_string())); - } - } - Ok(Matches { - opts: opts, - vals: vals, - free: free - }) -} - -/// Derive a usage message from a set of long options. 
-pub fn usage(brief: &str, opts: &[OptGroup]) -> String { - - let desc_sep = format!("\n{}", repeat(" ").take(24).collect::()); - - let rows = opts.iter().map(|optref| { - let OptGroup{short_name, - long_name, - hint, - desc, - hasarg, - ..} = (*optref).clone(); - - let mut row = repeat(" ").take(4).collect::(); - - // short option - match short_name.len() { - 0 => {} - 1 => { - row.push('-'); - row.push_str(&short_name[..]); - row.push(' '); - } - _ => panic!("the short name should only be 1 ascii char long"), - } - - // long option - match long_name.len() { - 0 => {} - _ => { - row.push_str("--"); - row.push_str(&long_name[..]); - row.push(' '); - } - } - - // arg - match hasarg { - No => {} - Yes => row.push_str(&hint[..]), - Maybe => { - row.push('['); - row.push_str(&hint[..]); - row.push(']'); - } - } - - // FIXME: #5516 should be graphemes not codepoints - // here we just need to indent the start of the description - let rowlen = row.chars().count(); - if rowlen < 24 { - for _ in 0..24 - rowlen { - row.push(' '); - } - } else { - row.push_str(&desc_sep[..]); - } - - // Normalize desc to contain words separated by one space character - let mut desc_normalized_whitespace = String::new(); - for word in desc.split_whitespace() { - desc_normalized_whitespace.push_str(word); - desc_normalized_whitespace.push(' '); - } - - // FIXME: #5516 should be graphemes not codepoints - let mut desc_rows = Vec::new(); - each_split_within(&desc_normalized_whitespace[..], 54, |substr| { - desc_rows.push(substr.to_string()); - true - }); - - // FIXME: #5516 should be graphemes not codepoints - // wrapped description - row.push_str(&desc_rows.connect(&desc_sep[..])); - - row - }); - - format!("{}\n\nOptions:\n{}\n", brief, - rows.collect::>().connect("\n")) -} - -fn format_option(opt: &OptGroup) -> String { - let mut line = String::new(); - - if opt.occur != Req { - line.push('['); - } - - // Use short_name is possible, but fallback to long_name. - if !opt.short_name.is_empty() { - line.push('-'); - line.push_str(&opt.short_name[..]); - } else { - line.push_str("--"); - line.push_str(&opt.long_name[..]); - } - - if opt.hasarg != No { - line.push(' '); - if opt.hasarg == Maybe { - line.push('['); - } - line.push_str(&opt.hint[..]); - if opt.hasarg == Maybe { - line.push(']'); - } - } - - if opt.occur != Req { - line.push(']'); - } - if opt.occur == Multi { - line.push_str(".."); - } - - line -} - -/// Derive a short one-line usage summary from a set of long options. -pub fn short_usage(program_name: &str, opts: &[OptGroup]) -> String { - let mut line = format!("Usage: {} ", program_name); - line.push_str(&opts.iter() - .map(format_option) - .collect::>() - .connect(" ")[..]); - line -} - -#[derive(Copy, Clone)] -enum SplitWithinState { - A, // leading whitespace, initial state - B, // words - C, // internal and trailing whitespace -} -#[derive(Copy, Clone)] -enum Whitespace { - Ws, // current char is whitespace - Cr // current char is not whitespace -} -#[derive(Copy, Clone)] -enum LengthLimit { - UnderLim, // current char makes current substring still fit in limit - OverLim // current char makes current substring no longer fit in limit -} - - -/// Splits a string into substrings with possibly internal whitespace, -/// each of them at most `lim` bytes long. The substrings have leading and trailing -/// whitespace removed, and are only cut at whitespace boundaries. 
-/// -/// Note: Function was moved here from `std::str` because this module is the only place that -/// uses it, and because it was too specific for a general string function. -/// -/// # Panics -/// -/// Panics during iteration if the string contains a non-whitespace -/// sequence longer than the limit. -fn each_split_within(ss: &str, lim: usize, mut it: F) -> bool where - F: FnMut(&str) -> bool -{ - // Just for fun, let's write this as a state machine: - - let mut slice_start = 0; - let mut last_start = 0; - let mut last_end = 0; - let mut state = A; - let mut fake_i = ss.len(); - let mut lim = lim; - - let mut cont = true; - - // if the limit is larger than the string, lower it to save cycles - if lim >= fake_i { - lim = fake_i; - } - - let mut machine = |cont: &mut bool, (i, c): (usize, char)| -> bool { - let whitespace = if c.is_whitespace() { Ws } else { Cr }; - let limit = if (i - slice_start + 1) <= lim { UnderLim } else { OverLim }; - - state = match (state, whitespace, limit) { - (A, Ws, _) => { A } - (A, Cr, _) => { slice_start = i; last_start = i; B } - - (B, Cr, UnderLim) => { B } - (B, Cr, OverLim) if (i - last_start + 1) > lim - => panic!("word starting with {} longer than limit!", - &ss[last_start..i + 1]), - (B, Cr, OverLim) => { - *cont = it(&ss[slice_start..last_end]); - slice_start = last_start; - B - } - (B, Ws, UnderLim) => { - last_end = i; - C - } - (B, Ws, OverLim) => { - last_end = i; - *cont = it(&ss[slice_start..last_end]); - A - } - - (C, Cr, UnderLim) => { - last_start = i; - B - } - (C, Cr, OverLim) => { - *cont = it(&ss[slice_start..last_end]); - slice_start = i; - last_start = i; - last_end = i; - B - } - (C, Ws, OverLim) => { - *cont = it(&ss[slice_start..last_end]); - A - } - (C, Ws, UnderLim) => { - C - } - }; - - *cont - }; - - ss.char_indices().all(|x| machine(&mut cont, x)); - - // Let the automaton 'run out' by supplying trailing whitespace - while cont && match state { B | C => true, A => false } { - machine(&mut cont, (fake_i, ' ')); - fake_i += 1; - } - return cont; -} - -#[test] -fn test_split_within() { - fn t(s: &str, i: usize, u: &[String]) { - let mut v = Vec::new(); - each_split_within(s, i, |s| { v.push(s.to_string()); true }); - assert!(v.iter().zip(u.iter()).all(|(a,b)| a == b)); - } - t("", 0, &[]); - t("", 15, &[]); - t("hello", 15, &["hello".to_string()]); - t("\nMary had a little lamb\nLittle lamb\n", 15, &[ - "Mary had a".to_string(), - "little lamb".to_string(), - "Little lamb".to_string() - ]); - t("\nMary had a little lamb\nLittle lamb\n", ::std::usize::MAX, - &["Mary had a little lamb\nLittle lamb".to_string()]); -} - -#[cfg(test)] -mod tests { - use super::*; - use super::Fail::*; - - use std::result::Result::{Err, Ok}; - use std::result; - - // Tests for reqopt - #[test] - fn test_reqopt() { - let long_args = vec!("--test=20".to_string()); - let opts = vec!(reqopt("t", "test", "testing", "TEST")); - let rs = getopts(&long_args, &opts); - match rs { - Ok(ref m) => { - assert!(m.opt_present("test")); - assert_eq!(m.opt_str("test").unwrap(), "20"); - assert!(m.opt_present("t")); - assert_eq!(m.opt_str("t").unwrap(), "20"); - } - _ => { panic!("test_reqopt failed (long arg)"); } - } - let short_args = vec!("-t".to_string(), "20".to_string()); - match getopts(&short_args, &opts) { - Ok(ref m) => { - assert!((m.opt_present("test"))); - assert_eq!(m.opt_str("test").unwrap(), "20"); - assert!((m.opt_present("t"))); - assert_eq!(m.opt_str("t").unwrap(), "20"); - } - _ => { panic!("test_reqopt failed (short arg)"); } - } - } - - #[test] 
- fn test_reqopt_missing() { - let args = vec!("blah".to_string()); - let opts = vec!(reqopt("t", "test", "testing", "TEST")); - let rs = getopts(&args, &opts); - match rs { - Err(OptionMissing(_)) => {}, - _ => panic!() - } - } - - #[test] - fn test_reqopt_no_arg() { - let long_args = vec!("--test".to_string()); - let opts = vec!(reqopt("t", "test", "testing", "TEST")); - let rs = getopts(&long_args, &opts); - match rs { - Err(ArgumentMissing(_)) => {}, - _ => panic!() - } - let short_args = vec!("-t".to_string()); - match getopts(&short_args, &opts) { - Err(ArgumentMissing(_)) => {}, - _ => panic!() - } - } - - #[test] - fn test_reqopt_multi() { - let args = vec!("--test=20".to_string(), "-t".to_string(), "30".to_string()); - let opts = vec!(reqopt("t", "test", "testing", "TEST")); - let rs = getopts(&args, &opts); - match rs { - Err(OptionDuplicated(_)) => {}, - _ => panic!() - } - } - - // Tests for optopt - #[test] - fn test_optopt() { - let long_args = vec!("--test=20".to_string()); - let opts = vec!(optopt("t", "test", "testing", "TEST")); - let rs = getopts(&long_args, &opts); - match rs { - Ok(ref m) => { - assert!(m.opt_present("test")); - assert_eq!(m.opt_str("test").unwrap(), "20"); - assert!((m.opt_present("t"))); - assert_eq!(m.opt_str("t").unwrap(), "20"); - } - _ => panic!() - } - let short_args = vec!("-t".to_string(), "20".to_string()); - match getopts(&short_args, &opts) { - Ok(ref m) => { - assert!((m.opt_present("test"))); - assert_eq!(m.opt_str("test").unwrap(), "20"); - assert!((m.opt_present("t"))); - assert_eq!(m.opt_str("t").unwrap(), "20"); - } - _ => panic!() - } - } - - #[test] - fn test_optopt_missing() { - let args = vec!("blah".to_string()); - let opts = vec!(optopt("t", "test", "testing", "TEST")); - let rs = getopts(&args, &opts); - match rs { - Ok(ref m) => { - assert!(!m.opt_present("test")); - assert!(!m.opt_present("t")); - } - _ => panic!() - } - } - - #[test] - fn test_optopt_no_arg() { - let long_args = vec!("--test".to_string()); - let opts = vec!(optopt("t", "test", "testing", "TEST")); - let rs = getopts(&long_args, &opts); - match rs { - Err(ArgumentMissing(_)) => {}, - _ => panic!() - } - let short_args = vec!("-t".to_string()); - match getopts(&short_args, &opts) { - Err(ArgumentMissing(_)) => {}, - _ => panic!() - } - } - - #[test] - fn test_optopt_multi() { - let args = vec!("--test=20".to_string(), "-t".to_string(), "30".to_string()); - let opts = vec!(optopt("t", "test", "testing", "TEST")); - let rs = getopts(&args, &opts); - match rs { - Err(OptionDuplicated(_)) => {}, - _ => panic!() - } - } - - // Tests for optflag - #[test] - fn test_optflag() { - let long_args = vec!("--test".to_string()); - let opts = vec!(optflag("t", "test", "testing")); - let rs = getopts(&long_args, &opts); - match rs { - Ok(ref m) => { - assert!(m.opt_present("test")); - assert!(m.opt_present("t")); - } - _ => panic!() - } - let short_args = vec!("-t".to_string()); - match getopts(&short_args, &opts) { - Ok(ref m) => { - assert!(m.opt_present("test")); - assert!(m.opt_present("t")); - } - _ => panic!() - } - } - - #[test] - fn test_optflag_missing() { - let args = vec!("blah".to_string()); - let opts = vec!(optflag("t", "test", "testing")); - let rs = getopts(&args, &opts); - match rs { - Ok(ref m) => { - assert!(!m.opt_present("test")); - assert!(!m.opt_present("t")); - } - _ => panic!() - } - } - - #[test] - fn test_optflag_long_arg() { - let args = vec!("--test=20".to_string()); - let opts = vec!(optflag("t", "test", "testing")); - let rs = getopts(&args, 
&opts); - match rs { - Err(UnexpectedArgument(_)) => {}, - _ => panic!() - } - } - - #[test] - fn test_optflag_multi() { - let args = vec!("--test".to_string(), "-t".to_string()); - let opts = vec!(optflag("t", "test", "testing")); - let rs = getopts(&args, &opts); - match rs { - Err(OptionDuplicated(_)) => {}, - _ => panic!() - } - } - - #[test] - fn test_optflag_short_arg() { - let args = vec!("-t".to_string(), "20".to_string()); - let opts = vec!(optflag("t", "test", "testing")); - let rs = getopts(&args, &opts); - match rs { - Ok(ref m) => { - // The next variable after the flag is just a free argument - - assert!(m.free[0] == "20"); - } - _ => panic!() - } - } - - // Tests for optflagmulti - #[test] - fn test_optflagmulti_short1() { - let args = vec!("-v".to_string()); - let opts = vec!(optflagmulti("v", "verbose", "verbosity")); - let rs = getopts(&args, &opts); - match rs { - Ok(ref m) => { - assert_eq!(m.opt_count("v"), 1); - } - _ => panic!() - } - } - - #[test] - fn test_optflagmulti_short2a() { - let args = vec!("-v".to_string(), "-v".to_string()); - let opts = vec!(optflagmulti("v", "verbose", "verbosity")); - let rs = getopts(&args, &opts); - match rs { - Ok(ref m) => { - assert_eq!(m.opt_count("v"), 2); - } - _ => panic!() - } - } - - #[test] - fn test_optflagmulti_short2b() { - let args = vec!("-vv".to_string()); - let opts = vec!(optflagmulti("v", "verbose", "verbosity")); - let rs = getopts(&args, &opts); - match rs { - Ok(ref m) => { - assert_eq!(m.opt_count("v"), 2); - } - _ => panic!() - } - } - - #[test] - fn test_optflagmulti_long1() { - let args = vec!("--verbose".to_string()); - let opts = vec!(optflagmulti("v", "verbose", "verbosity")); - let rs = getopts(&args, &opts); - match rs { - Ok(ref m) => { - assert_eq!(m.opt_count("verbose"), 1); - } - _ => panic!() - } - } - - #[test] - fn test_optflagmulti_long2() { - let args = vec!("--verbose".to_string(), "--verbose".to_string()); - let opts = vec!(optflagmulti("v", "verbose", "verbosity")); - let rs = getopts(&args, &opts); - match rs { - Ok(ref m) => { - assert_eq!(m.opt_count("verbose"), 2); - } - _ => panic!() - } - } - - #[test] - fn test_optflagmulti_mix() { - let args = vec!("--verbose".to_string(), "-v".to_string(), - "-vv".to_string(), "verbose".to_string()); - let opts = vec!(optflagmulti("v", "verbose", "verbosity")); - let rs = getopts(&args, &opts); - match rs { - Ok(ref m) => { - assert_eq!(m.opt_count("verbose"), 4); - assert_eq!(m.opt_count("v"), 4); - } - _ => panic!() - } - } - - // Tests for optmulti - #[test] - fn test_optmulti() { - let long_args = vec!("--test=20".to_string()); - let opts = vec!(optmulti("t", "test", "testing", "TEST")); - let rs = getopts(&long_args, &opts); - match rs { - Ok(ref m) => { - assert!((m.opt_present("test"))); - assert_eq!(m.opt_str("test").unwrap(), "20"); - assert!((m.opt_present("t"))); - assert_eq!(m.opt_str("t").unwrap(), "20"); - } - _ => panic!() - } - let short_args = vec!("-t".to_string(), "20".to_string()); - match getopts(&short_args, &opts) { - Ok(ref m) => { - assert!((m.opt_present("test"))); - assert_eq!(m.opt_str("test").unwrap(), "20"); - assert!((m.opt_present("t"))); - assert_eq!(m.opt_str("t").unwrap(), "20"); - } - _ => panic!() - } - } - - #[test] - fn test_optmulti_missing() { - let args = vec!("blah".to_string()); - let opts = vec!(optmulti("t", "test", "testing", "TEST")); - let rs = getopts(&args, &opts); - match rs { - Ok(ref m) => { - assert!(!m.opt_present("test")); - assert!(!m.opt_present("t")); - } - _ => panic!() - } - } - - #[test] 
- fn test_optmulti_no_arg() { - let long_args = vec!("--test".to_string()); - let opts = vec!(optmulti("t", "test", "testing", "TEST")); - let rs = getopts(&long_args, &opts); - match rs { - Err(ArgumentMissing(_)) => {}, - _ => panic!() - } - let short_args = vec!("-t".to_string()); - match getopts(&short_args, &opts) { - Err(ArgumentMissing(_)) => {}, - _ => panic!() - } - } - - #[test] - fn test_optmulti_multi() { - let args = vec!("--test=20".to_string(), "-t".to_string(), "30".to_string()); - let opts = vec!(optmulti("t", "test", "testing", "TEST")); - let rs = getopts(&args, &opts); - match rs { - Ok(ref m) => { - assert!(m.opt_present("test")); - assert_eq!(m.opt_str("test").unwrap(), "20"); - assert!(m.opt_present("t")); - assert_eq!(m.opt_str("t").unwrap(), "20"); - let pair = m.opt_strs("test"); - assert!(pair[0] == "20"); - assert!(pair[1] == "30"); - } - _ => panic!() - } - } - - #[test] - fn test_unrecognized_option() { - let long_args = vec!("--untest".to_string()); - let opts = vec!(optmulti("t", "test", "testing", "TEST")); - let rs = getopts(&long_args, &opts); - match rs { - Err(UnrecognizedOption(_)) => {}, - _ => panic!() - } - let short_args = vec!("-u".to_string()); - match getopts(&short_args, &opts) { - Err(UnrecognizedOption(_)) => {}, - _ => panic!() - } - } - - #[test] - fn test_combined() { - let args = - vec!("prog".to_string(), - "free1".to_string(), - "-s".to_string(), - "20".to_string(), - "free2".to_string(), - "--flag".to_string(), - "--long=30".to_string(), - "-f".to_string(), - "-m".to_string(), - "40".to_string(), - "-m".to_string(), - "50".to_string(), - "-n".to_string(), - "-A B".to_string(), - "-n".to_string(), - "-60 70".to_string()); - let opts = - vec!(optopt("s", "something", "something", "SOMETHING"), - optflag("", "flag", "a flag"), - reqopt("", "long", "hi", "LONG"), - optflag("f", "", "another flag"), - optmulti("m", "", "mmmmmm", "YUM"), - optmulti("n", "", "nothing", "NOTHING"), - optopt("", "notpresent", "nothing to see here", "NOPE")); - let rs = getopts(&args, &opts); - match rs { - Ok(ref m) => { - assert!(m.free[0] == "prog"); - assert!(m.free[1] == "free1"); - assert_eq!(m.opt_str("s").unwrap(), "20"); - assert!(m.free[2] == "free2"); - assert!((m.opt_present("flag"))); - assert_eq!(m.opt_str("long").unwrap(), "30"); - assert!((m.opt_present("f"))); - let pair = m.opt_strs("m"); - assert!(pair[0] == "40"); - assert!(pair[1] == "50"); - let pair = m.opt_strs("n"); - assert!(pair[0] == "-A B"); - assert!(pair[1] == "-60 70"); - assert!((!m.opt_present("notpresent"))); - } - _ => panic!() - } - } - - #[test] - fn test_multi() { - let opts = vec!(optopt("e", "", "encrypt", "ENCRYPT"), - optopt("", "encrypt", "encrypt", "ENCRYPT"), - optopt("f", "", "flag", "FLAG")); - - let args_single = vec!("-e".to_string(), "foo".to_string()); - let matches_single = &match getopts(&args_single, &opts) { - result::Result::Ok(m) => m, - result::Result::Err(_) => panic!() - }; - assert!(matches_single.opts_present(&["e".to_string()])); - assert!(matches_single.opts_present(&["encrypt".to_string(), "e".to_string()])); - assert!(matches_single.opts_present(&["e".to_string(), "encrypt".to_string()])); - assert!(!matches_single.opts_present(&["encrypt".to_string()])); - assert!(!matches_single.opts_present(&["thing".to_string()])); - assert!(!matches_single.opts_present(&[])); - - assert_eq!(matches_single.opts_str(&["e".to_string()]).unwrap(), "foo"); - assert_eq!(matches_single.opts_str(&["e".to_string(), "encrypt".to_string()]).unwrap(), - "foo"); - 
assert_eq!(matches_single.opts_str(&["encrypt".to_string(), "e".to_string()]).unwrap(), - "foo"); - - let args_both = vec!("-e".to_string(), "foo".to_string(), "--encrypt".to_string(), - "foo".to_string()); - let matches_both = &match getopts(&args_both, &opts) { - result::Result::Ok(m) => m, - result::Result::Err(_) => panic!() - }; - assert!(matches_both.opts_present(&["e".to_string()])); - assert!(matches_both.opts_present(&["encrypt".to_string()])); - assert!(matches_both.opts_present(&["encrypt".to_string(), "e".to_string()])); - assert!(matches_both.opts_present(&["e".to_string(), "encrypt".to_string()])); - assert!(!matches_both.opts_present(&["f".to_string()])); - assert!(!matches_both.opts_present(&["thing".to_string()])); - assert!(!matches_both.opts_present(&[])); - - assert_eq!(matches_both.opts_str(&["e".to_string()]).unwrap(), "foo"); - assert_eq!(matches_both.opts_str(&["encrypt".to_string()]).unwrap(), "foo"); - assert_eq!(matches_both.opts_str(&["e".to_string(), "encrypt".to_string()]).unwrap(), - "foo"); - assert_eq!(matches_both.opts_str(&["encrypt".to_string(), "e".to_string()]).unwrap(), - "foo"); - } - - #[test] - fn test_nospace() { - let args = vec!("-Lfoo".to_string(), "-M.".to_string()); - let opts = vec!(optmulti("L", "", "library directory", "LIB"), - optmulti("M", "", "something", "MMMM")); - let matches = &match getopts(&args, &opts) { - result::Result::Ok(m) => m, - result::Result::Err(_) => panic!() - }; - assert!(matches.opts_present(&["L".to_string()])); - assert_eq!(matches.opts_str(&["L".to_string()]).unwrap(), "foo"); - assert!(matches.opts_present(&["M".to_string()])); - assert_eq!(matches.opts_str(&["M".to_string()]).unwrap(), "."); - - } - - #[test] - fn test_nospace_conflict() { - let args = vec!("-vvLverbose".to_string(), "-v".to_string() ); - let opts = vec!(optmulti("L", "", "library directory", "LIB"), - optflagmulti("v", "verbose", "Verbose")); - let matches = &match getopts(&args, &opts) { - result::Result::Ok(m) => m, - result::Result::Err(e) => panic!( "{}", e ) - }; - assert!(matches.opts_present(&["L".to_string()])); - assert_eq!(matches.opts_str(&["L".to_string()]).unwrap(), "verbose"); - assert!(matches.opts_present(&["v".to_string()])); - assert_eq!(3, matches.opt_count("v")); - } - - #[test] - fn test_long_to_short() { - let mut short = Opt { - name: Name::Long("banana".to_string()), - hasarg: HasArg::Yes, - occur: Occur::Req, - aliases: Vec::new(), - }; - short.aliases = vec!(Opt { name: Name::Short('b'), - hasarg: HasArg::Yes, - occur: Occur::Req, - aliases: Vec::new() }); - let verbose = reqopt("b", "banana", "some bananas", "VAL"); - - assert!(verbose.long_to_short() == short); - } - - #[test] - fn test_aliases_long_and_short() { - let opts = vec!( - optflagmulti("a", "apple", "Desc")); - - let args = vec!("-a".to_string(), "--apple".to_string(), "-a".to_string()); - - let matches = getopts(&args, &opts).unwrap(); - assert_eq!(3, matches.opt_count("a")); - assert_eq!(3, matches.opt_count("apple")); - } - - #[test] - fn test_usage() { - let optgroups = vec!( - reqopt("b", "banana", "Desc", "VAL"), - optopt("a", "012345678901234567890123456789", - "Desc", "VAL"), - optflag("k", "kiwi", "Desc"), - optflagopt("p", "", "Desc", "VAL"), - optmulti("l", "", "Desc", "VAL")); - - let expected = -"Usage: fruits - -Options: - -b --banana VAL Desc - -a --012345678901234567890123456789 VAL - Desc - -k --kiwi Desc - -p [VAL] Desc - -l VAL Desc -"; - - let generated_usage = usage("Usage: fruits", &optgroups); - - debug!("expected: <<{}>>", 
expected); - debug!("generated: <<{}>>", generated_usage); - assert_eq!(generated_usage, expected); - } - - #[test] - fn test_usage_description_wrapping() { - // indentation should be 24 spaces - // lines wrap after 78: or rather descriptions wrap after 54 - - let optgroups = vec!( - optflag("k", "kiwi", - "This is a long description which won't be wrapped..+.."), // 54 - optflag("a", "apple", - "This is a long description which _will_ be wrapped..+..")); - - let expected = -"Usage: fruits - -Options: - -k --kiwi This is a long description which won't be wrapped..+.. - -a --apple This is a long description which _will_ be - wrapped..+.. -"; - - let usage = usage("Usage: fruits", &optgroups); - - debug!("expected: <<{}>>", expected); - debug!("generated: <<{}>>", usage); - assert!(usage == expected) - } - - #[test] - fn test_usage_description_multibyte_handling() { - let optgroups = vec!( - optflag("k", "k\u{2013}w\u{2013}", - "The word kiwi is normally spelled with two i's"), - optflag("a", "apple", - "This \u{201C}description\u{201D} has some characters that could \ -confuse the line wrapping; an apple costs 0.51€ in some parts of Europe.")); - - let expected = -"Usage: fruits - -Options: - -k --k–w– The word kiwi is normally spelled with two i's - -a --apple This “description” has some characters that could - confuse the line wrapping; an apple costs 0.51€ in - some parts of Europe. -"; - - let usage = usage("Usage: fruits", &optgroups); - - debug!("expected: <<{}>>", expected); - debug!("generated: <<{}>>", usage); - assert!(usage == expected) - } - - #[test] - fn test_short_usage() { - let optgroups = vec!( - reqopt("b", "banana", "Desc", "VAL"), - optopt("a", "012345678901234567890123456789", - "Desc", "VAL"), - optflag("k", "kiwi", "Desc"), - optflagopt("p", "", "Desc", "VAL"), - optmulti("l", "", "Desc", "VAL")); - - let expected = "Usage: fruits -b VAL [-a VAL] [-k] [-p [VAL]] [-l VAL]..".to_string(); - let generated_usage = short_usage("fruits", &optgroups); - - debug!("expected: <<{}>>", expected); - debug!("generated: <<{}>>", generated_usage); - assert_eq!(generated_usage, expected); - } -} diff --git a/src/libserialize/collection_impls.rs b/src/libserialize/collection_impls.rs deleted file mode 100644 index e7430f698e9c9..0000000000000 --- a/src/libserialize/collection_impls.rs +++ /dev/null @@ -1,253 +0,0 @@ -// Copyright 2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! 
Implementations of serialization for structures found in libcollections - -use std::usize; -use std::default::Default; -use std::hash::Hash; -use std::collections::hash_state::HashState; - -use {Decodable, Encodable, Decoder, Encoder}; -use std::collections::{LinkedList, VecDeque, BTreeMap, BTreeSet, HashMap, HashSet, VecMap}; -use collections::enum_set::{EnumSet, CLike}; - -impl< - T: Encodable -> Encodable for LinkedList { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_seq(self.len(), |s| { - for (i, e) in self.iter().enumerate() { - try!(s.emit_seq_elt(i, |s| e.encode(s))); - } - Ok(()) - }) - } -} - -impl Decodable for LinkedList { - fn decode(d: &mut D) -> Result, D::Error> { - d.read_seq(|d, len| { - let mut list = LinkedList::new(); - for i in 0..len { - list.push_back(try!(d.read_seq_elt(i, |d| Decodable::decode(d)))); - } - Ok(list) - }) - } -} - -impl Encodable for VecDeque { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_seq(self.len(), |s| { - for (i, e) in self.iter().enumerate() { - try!(s.emit_seq_elt(i, |s| e.encode(s))); - } - Ok(()) - }) - } -} - -impl Decodable for VecDeque { - fn decode(d: &mut D) -> Result, D::Error> { - d.read_seq(|d, len| { - let mut deque: VecDeque = VecDeque::new(); - for i in 0..len { - deque.push_back(try!(d.read_seq_elt(i, |d| Decodable::decode(d)))); - } - Ok(deque) - }) - } -} - -impl< - K: Encodable + PartialEq + Ord, - V: Encodable + PartialEq -> Encodable for BTreeMap { - fn encode(&self, e: &mut S) -> Result<(), S::Error> { - e.emit_map(self.len(), |e| { - let mut i = 0; - for (key, val) in self { - try!(e.emit_map_elt_key(i, |e| key.encode(e))); - try!(e.emit_map_elt_val(i, |e| val.encode(e))); - i += 1; - } - Ok(()) - }) - } -} - -impl< - K: Decodable + PartialEq + Ord, - V: Decodable + PartialEq -> Decodable for BTreeMap { - fn decode(d: &mut D) -> Result, D::Error> { - d.read_map(|d, len| { - let mut map = BTreeMap::new(); - for i in 0..len { - let key = try!(d.read_map_elt_key(i, |d| Decodable::decode(d))); - let val = try!(d.read_map_elt_val(i, |d| Decodable::decode(d))); - map.insert(key, val); - } - Ok(map) - }) - } -} - -impl< - T: Encodable + PartialEq + Ord -> Encodable for BTreeSet { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_seq(self.len(), |s| { - let mut i = 0; - for e in self { - try!(s.emit_seq_elt(i, |s| e.encode(s))); - i += 1; - } - Ok(()) - }) - } -} - -impl< - T: Decodable + PartialEq + Ord -> Decodable for BTreeSet { - fn decode(d: &mut D) -> Result, D::Error> { - d.read_seq(|d, len| { - let mut set = BTreeSet::new(); - for i in 0..len { - set.insert(try!(d.read_seq_elt(i, |d| Decodable::decode(d)))); - } - Ok(set) - }) - } -} - -impl< - T: Encodable + CLike -> Encodable for EnumSet { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - let mut bits = 0; - for item in self { - bits |= item.to_usize(); - } - s.emit_uint(bits) - } -} - -impl< - T: Decodable + CLike -> Decodable for EnumSet { - fn decode(d: &mut D) -> Result, D::Error> { - let bits = try!(d.read_uint()); - let mut set = EnumSet::new(); - for bit in 0..usize::BITS { - if bits & (1 << bit) != 0 { - set.insert(CLike::from_usize(1 << bit)); - } - } - Ok(set) - } -} - -impl Encodable for HashMap - where K: Encodable + Hash + Eq, - V: Encodable, - S: HashState, -{ - fn encode(&self, e: &mut E) -> Result<(), E::Error> { - e.emit_map(self.len(), |e| { - let mut i = 0; - for (key, val) in self { - try!(e.emit_map_elt_key(i, |e| key.encode(e))); - try!(e.emit_map_elt_val(i, |e| val.encode(e))); - i += 
1; - } - Ok(()) - }) - } -} - -impl Decodable for HashMap - where K: Decodable + Hash + Eq, - V: Decodable, - S: HashState + Default, -{ - fn decode(d: &mut D) -> Result, D::Error> { - d.read_map(|d, len| { - let state = Default::default(); - let mut map = HashMap::with_capacity_and_hash_state(len, state); - for i in 0..len { - let key = try!(d.read_map_elt_key(i, |d| Decodable::decode(d))); - let val = try!(d.read_map_elt_val(i, |d| Decodable::decode(d))); - map.insert(key, val); - } - Ok(map) - }) - } -} - -impl Encodable for HashSet - where T: Encodable + Hash + Eq, - S: HashState, -{ - fn encode(&self, s: &mut E) -> Result<(), E::Error> { - s.emit_seq(self.len(), |s| { - let mut i = 0; - for e in self { - try!(s.emit_seq_elt(i, |s| e.encode(s))); - i += 1; - } - Ok(()) - }) - } -} - -impl Decodable for HashSet - where T: Decodable + Hash + Eq, - S: HashState + Default, -{ - fn decode(d: &mut D) -> Result, D::Error> { - d.read_seq(|d, len| { - let state = Default::default(); - let mut set = HashSet::with_capacity_and_hash_state(len, state); - for i in 0..len { - set.insert(try!(d.read_seq_elt(i, |d| Decodable::decode(d)))); - } - Ok(set) - }) - } -} - -impl Encodable for VecMap { - fn encode(&self, e: &mut S) -> Result<(), S::Error> { - e.emit_map(self.len(), |e| { - for (i, (key, val)) in self.iter().enumerate() { - try!(e.emit_map_elt_key(i, |e| key.encode(e))); - try!(e.emit_map_elt_val(i, |e| val.encode(e))); - } - Ok(()) - }) - } -} - -impl Decodable for VecMap { - fn decode(d: &mut D) -> Result, D::Error> { - d.read_map(|d, len| { - let mut map = VecMap::new(); - for i in 0..len { - let key = try!(d.read_map_elt_key(i, |d| Decodable::decode(d))); - let val = try!(d.read_map_elt_val(i, |d| Decodable::decode(d))); - map.insert(key, val); - } - Ok(map) - }) - } -} diff --git a/src/libserialize/hex.rs b/src/libserialize/hex.rs deleted file mode 100644 index 87f1dca2caed0..0000000000000 --- a/src/libserialize/hex.rs +++ /dev/null @@ -1,226 +0,0 @@ -// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! Hex binary-to-text encoding - -pub use self::FromHexError::*; - -use std::fmt; -use std::error; - -/// A trait for converting a value to hexadecimal encoding -pub trait ToHex { - /// Converts the value of `self` to a hex value, returning the owned - /// string. - fn to_hex(&self) -> String; -} - -const CHARS: &'static [u8] = b"0123456789abcdef"; - -impl ToHex for [u8] { - /// Turn a vector of `u8` bytes into a hexadecimal string. - /// - /// # Examples - /// - /// ``` - /// # #![feature(rustc_private)] - /// extern crate serialize; - /// use serialize::hex::ToHex; - /// - /// fn main () { - /// let str = [52,32].to_hex(); - /// println!("{}", str); - /// } - /// ``` - fn to_hex(&self) -> String { - let mut v = Vec::with_capacity(self.len() * 2); - for &byte in self { - v.push(CHARS[(byte >> 4) as usize]); - v.push(CHARS[(byte & 0xf) as usize]); - } - - unsafe { - String::from_utf8_unchecked(v) - } - } -} - -/// A trait for converting hexadecimal encoded values -pub trait FromHex { - /// Converts the value of `self`, interpreted as hexadecimal encoded data, - /// into an owned vector of bytes, returning the vector. 
- fn from_hex(&self) -> Result, FromHexError>; -} - -/// Errors that can occur when decoding a hex encoded string -#[derive(Copy, Clone, Debug)] -pub enum FromHexError { - /// The input contained a character not part of the hex format - InvalidHexCharacter(char, usize), - /// The input had an invalid length - InvalidHexLength, -} - -impl fmt::Display for FromHexError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - InvalidHexCharacter(ch, idx) => - write!(f, "Invalid character '{}' at position {}", ch, idx), - InvalidHexLength => write!(f, "Invalid input length"), - } - } -} - -impl error::Error for FromHexError { - fn description(&self) -> &str { - match *self { - InvalidHexCharacter(_, _) => "invalid character", - InvalidHexLength => "invalid length", - } - } -} - - -impl FromHex for str { - /// Convert any hexadecimal encoded string (literal, `@`, `&`, or `~`) - /// to the byte values it encodes. - /// - /// You can use the `String::from_utf8` function to turn a - /// `Vec` into a string with characters corresponding to those values. - /// - /// # Examples - /// - /// This converts a string literal to hexadecimal and back. - /// - /// ``` - /// # #![feature(rustc_private)] - /// extern crate serialize; - /// use serialize::hex::{FromHex, ToHex}; - /// - /// fn main () { - /// let hello_str = "Hello, World".as_bytes().to_hex(); - /// println!("{}", hello_str); - /// let bytes = hello_str.from_hex().unwrap(); - /// println!("{:?}", bytes); - /// let result_str = String::from_utf8(bytes).unwrap(); - /// println!("{}", result_str); - /// } - /// ``` - fn from_hex(&self) -> Result, FromHexError> { - // This may be an overestimate if there is any whitespace - let mut b = Vec::with_capacity(self.len() / 2); - let mut modulus = 0; - let mut buf = 0; - - for (idx, byte) in self.bytes().enumerate() { - buf <<= 4; - - match byte { - b'A'...b'F' => buf |= byte - b'A' + 10, - b'a'...b'f' => buf |= byte - b'a' + 10, - b'0'...b'9' => buf |= byte - b'0', - b' '|b'\r'|b'\n'|b'\t' => { - buf >>= 4; - continue - } - _ => return Err(InvalidHexCharacter(self.char_at(idx), idx)), - } - - modulus += 1; - if modulus == 2 { - modulus = 0; - b.push(buf); - } - } - - match modulus { - 0 => Ok(b.into_iter().collect()), - _ => Err(InvalidHexLength), - } - } -} - -#[cfg(test)] -mod tests { - extern crate test; - use self::test::Bencher; - use hex::{FromHex, ToHex}; - - #[test] - pub fn test_to_hex() { - assert_eq!("foobar".as_bytes().to_hex(), "666f6f626172"); - } - - #[test] - pub fn test_from_hex_okay() { - assert_eq!("666f6f626172".from_hex().unwrap(), - b"foobar"); - assert_eq!("666F6F626172".from_hex().unwrap(), - b"foobar"); - } - - #[test] - pub fn test_from_hex_odd_len() { - assert!("666".from_hex().is_err()); - assert!("66 6".from_hex().is_err()); - } - - #[test] - pub fn test_from_hex_invalid_char() { - assert!("66y6".from_hex().is_err()); - } - - #[test] - pub fn test_from_hex_ignores_whitespace() { - assert_eq!("666f 6f6\r\n26172 ".from_hex().unwrap(), - b"foobar"); - } - - #[test] - pub fn test_to_hex_all_bytes() { - for i in 0..256 { - assert_eq!([i as u8].to_hex(), format!("{:02x}", i as usize)); - } - } - - #[test] - pub fn test_from_hex_all_bytes() { - for i in 0..256 { - let ii: &[u8] = &[i as u8]; - assert_eq!(format!("{:02x}", i as usize).from_hex() - .unwrap(), - ii); - assert_eq!(format!("{:02X}", i as usize).from_hex() - .unwrap(), - ii); - } - } - - #[bench] - pub fn bench_to_hex(b: &mut Bencher) { - let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \ - ウヰノオクヤマ ケフコエテ アサキユメミシ 
ヱヒモセスン"; - b.iter(|| { - s.as_bytes().to_hex(); - }); - b.bytes = s.len() as u64; - } - - #[bench] - pub fn bench_from_hex(b: &mut Bencher) { - let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \ - ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン"; - let sb = s.as_bytes().to_hex(); - b.iter(|| { - sb.from_hex().unwrap(); - }); - b.bytes = sb.len() as u64; - } -} diff --git a/src/libserialize/json.rs b/src/libserialize/json.rs deleted file mode 100644 index 24cc7fe878af4..0000000000000 --- a/src/libserialize/json.rs +++ /dev/null @@ -1,4024 +0,0 @@ -// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -// Rust JSON serialization library -// Copyright (c) 2011 Google Inc. - -#![forbid(non_camel_case_types)] -#![allow(missing_docs)] - -//! JSON parsing and serialization -//! -//! # What is JSON? -//! -//! JSON (JavaScript Object Notation) is a way to write data in Javascript. -//! Like XML, it allows to encode structured data in a text format that can be easily read by humans -//! Its simple syntax and native compatibility with JavaScript have made it a widely used format. -//! -//! Data types that can be encoded are JavaScript types (see the `Json` enum for more details): -//! -//! * `Boolean`: equivalent to rust's `bool` -//! * `Number`: equivalent to rust's `f64` -//! * `String`: equivalent to rust's `String` -//! * `Array`: equivalent to rust's `Vec`, but also allowing objects of different types in the -//! same array -//! * `Object`: equivalent to rust's `BTreeMap` -//! * `Null` -//! -//! An object is a series of string keys mapping to values, in `"key": value` format. -//! Arrays are enclosed in square brackets ([ ... ]) and objects in curly brackets ({ ... }). -//! A simple JSON document encoding a person, their age, address and phone numbers could look like -//! -//! ```ignore -//! { -//! "FirstName": "John", -//! "LastName": "Doe", -//! "Age": 43, -//! "Address": { -//! "Street": "Downing Street 10", -//! "City": "London", -//! "Country": "Great Britain" -//! }, -//! "PhoneNumbers": [ -//! "+44 1234567", -//! "+44 2345678" -//! ] -//! } -//! ``` -//! -//! # Rust Type-based Encoding and Decoding -//! -//! Rust provides a mechanism for low boilerplate encoding & decoding of values to and from JSON via -//! the serialization API. -//! To be able to encode a piece of data, it must implement the `serialize::RustcEncodable` trait. -//! To be able to decode a piece of data, it must implement the `serialize::RustcDecodable` trait. -//! The Rust compiler provides an annotation to automatically generate the code for these traits: -//! `#[derive(RustcDecodable, RustcEncodable)]` -//! -//! The JSON API provides an enum `json::Json` and a trait `ToJson` to encode objects. -//! The `ToJson` trait provides a `to_json` method to convert an object into a `json::Json` value. -//! A `json::Json` value can be encoded as a string or buffer using the functions described above. -//! You can also use the `json::Encoder` object, which implements the `Encoder` trait. -//! -//! When using `ToJson` the `RustcEncodable` trait implementation is not mandatory. -//! -//! # Examples of use -//! -//! ## Using Autoserialization -//! -//! Create a struct called `TestStruct` and serialize and deserialize it to and from JSON using the -//! 
serialization API, using the derived serialization code. -//! -//! ```notrust -//! // FIXME(#19470): this cannot be ```rust``` because it fails orphan checking at the moment -//! extern crate serialize; -//! use serialize::json; -//! -//! // Automatically generate `Decodable` and `Encodable` trait implementations -//! #[derive(RustcDecodable, RustcEncodable)] -//! pub struct TestStruct { -//! data_int: u8, -//! data_str: String, -//! data_vector: Vec, -//! } -//! -//! fn main() { -//! let object = TestStruct { -//! data_int: 1, -//! data_str: "homura".to_string(), -//! data_vector: vec![2,3,4,5], -//! }; -//! -//! // Serialize using `json::encode` -//! let encoded = json::encode(&object).unwrap(); -//! -//! // Deserialize using `json::decode` -//! let decoded: TestStruct = json::decode(&encoded[..]).unwrap(); -//! } -//! ``` -//! -//! ## Using the `ToJson` trait -//! -//! The examples above use the `ToJson` trait to generate the JSON string, which is required -//! for custom mappings. -//! -//! ### Simple example of `ToJson` usage -//! -//! ```notrust -//! // FIXME(#19470): this cannot be ```rust``` because it fails orphan checking at the moment -//! extern crate serialize; -//! use serialize::json::{self, ToJson, Json}; -//! -//! // A custom data structure -//! struct ComplexNum { -//! a: f64, -//! b: f64, -//! } -//! -//! // JSON value representation -//! impl ToJson for ComplexNum { -//! fn to_json(&self) -> Json { -//! Json::String(format!("{}+{}i", self.a, self.b)) -//! } -//! } -//! -//! // Only generate `RustcEncodable` trait implementation -//! #[derive(Encodable)] -//! pub struct ComplexNumRecord { -//! uid: u8, -//! dsc: String, -//! val: Json, -//! } -//! -//! fn main() { -//! let num = ComplexNum { a: 0.0001, b: 12.539 }; -//! let data: String = json::encode(&ComplexNumRecord{ -//! uid: 1, -//! dsc: "test".to_string(), -//! val: num.to_json(), -//! }).unwrap(); -//! println!("data: {}", data); -//! // data: {"uid":1,"dsc":"test","val":"0.0001+12.539i"}; -//! } -//! ``` -//! -//! ### Verbose example of `ToJson` usage -//! -//! ```notrust -//! // FIXME(#19470): this cannot be ```rust``` because it fails orphan checking at the moment -//! extern crate serialize; -//! use std::collections::BTreeMap; -//! use serialize::json::{self, Json, ToJson}; -//! -//! // Only generate `Decodable` trait implementation -//! #[derive(Decodable)] -//! pub struct TestStruct { -//! data_int: u8, -//! data_str: String, -//! data_vector: Vec, -//! } -//! -//! // Specify encoding method manually -//! impl ToJson for TestStruct { -//! fn to_json(&self) -> Json { -//! let mut d = BTreeMap::new(); -//! // All standard types implement `to_json()`, so use it -//! d.insert("data_int".to_string(), self.data_int.to_json()); -//! d.insert("data_str".to_string(), self.data_str.to_json()); -//! d.insert("data_vector".to_string(), self.data_vector.to_json()); -//! Json::Object(d) -//! } -//! } -//! -//! fn main() { -//! // Serialize using `ToJson` -//! let input_data = TestStruct { -//! data_int: 1, -//! data_str: "madoka".to_string(), -//! data_vector: vec![2,3,4,5], -//! }; -//! let json_obj: Json = input_data.to_json(); -//! let json_str: String = json_obj.to_string(); -//! -//! // Deserialize like before -//! let decoded: TestStruct = json::decode(json_str)).unwrap(); -//! } -//! 
``` - -use self::JsonEvent::*; -use self::ErrorCode::*; -use self::ParserError::*; -use self::DecoderError::*; -use self::ParserState::*; -use self::InternalStackElement::*; - -use std::collections::{HashMap, BTreeMap}; -use std::io::prelude::*; -use std::io; -use std::mem::swap; -use std::num::FpCategory as Fp; -use std::ops::Index; -use std::str::FromStr; -use std::string; -use std::{char, f64, fmt, str}; -use std; -use rustc_unicode::str as unicode_str; -use rustc_unicode::str::Utf16Item; - -use Encodable; - -/// Represents a json value -#[derive(Clone, PartialEq, PartialOrd, Debug)] -pub enum Json { - I64(i64), - U64(u64), - F64(f64), - String(string::String), - Boolean(bool), - Array(self::Array), - Object(self::Object), - Null, -} - -pub type Array = Vec; -pub type Object = BTreeMap; - -pub struct PrettyJson<'a> { inner: &'a Json } - -pub struct AsJson<'a, T: 'a> { inner: &'a T } -pub struct AsPrettyJson<'a, T: 'a> { inner: &'a T, indent: Option } - -/// The errors that can arise while parsing a JSON stream. -#[derive(Clone, Copy, PartialEq, Debug)] -pub enum ErrorCode { - InvalidSyntax, - InvalidNumber, - EOFWhileParsingObject, - EOFWhileParsingArray, - EOFWhileParsingValue, - EOFWhileParsingString, - KeyMustBeAString, - ExpectedColon, - TrailingCharacters, - TrailingComma, - InvalidEscape, - InvalidUnicodeCodePoint, - LoneLeadingSurrogateInHexEscape, - UnexpectedEndOfHexEscape, - UnrecognizedHex, - NotFourDigit, - NotUtf8, -} - -#[derive(Clone, PartialEq, Debug)] -pub enum ParserError { - /// msg, line, col - SyntaxError(ErrorCode, usize, usize), - IoError(io::ErrorKind, String), -} - -// Builder and Parser have the same errors. -pub type BuilderError = ParserError; - -#[derive(Clone, PartialEq, Debug)] -pub enum DecoderError { - ParseError(ParserError), - ExpectedError(string::String, string::String), - MissingFieldError(string::String), - UnknownVariantError(string::String), - ApplicationError(string::String) -} - -#[derive(Copy, Clone, Debug)] -pub enum EncoderError { - FmtError(fmt::Error), - BadHashmapKey, -} - -/// Returns a readable error string for a given error code. 
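Since `Array` and `Object` are plain `Vec` and `BTreeMap` aliases, a `Json` tree can be assembled directly. A small sketch (assuming the `serialize` crate on a nightly with `rustc_private`), with the compact rendering coming from the `Display` impl further down in this module:

```rust
#![feature(rustc_private)]
extern crate serialize;

use std::collections::BTreeMap;
use serialize::json::Json;

fn main() {
    // `Object` is a BTreeMap<String, Json>, `Array` is a Vec<Json>.
    let mut obj = BTreeMap::new();
    obj.insert("name".to_string(), Json::String("Ferris".to_string()));
    obj.insert("age".to_string(), Json::U64(10));
    obj.insert("tags".to_string(),
               Json::Array(vec![Json::Boolean(true), Json::Null]));

    // Keys come back in BTreeMap (sorted) order when rendered.
    println!("{}", Json::Object(obj));
}
```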
-pub fn error_str(error: ErrorCode) -> &'static str { - match error { - InvalidSyntax => "invalid syntax", - InvalidNumber => "invalid number", - EOFWhileParsingObject => "EOF While parsing object", - EOFWhileParsingArray => "EOF While parsing array", - EOFWhileParsingValue => "EOF While parsing value", - EOFWhileParsingString => "EOF While parsing string", - KeyMustBeAString => "key must be a string", - ExpectedColon => "expected `:`", - TrailingCharacters => "trailing characters", - TrailingComma => "trailing comma", - InvalidEscape => "invalid escape", - UnrecognizedHex => "invalid \\u{ esc}ape (unrecognized hex)", - NotFourDigit => "invalid \\u{ esc}ape (not four digits)", - NotUtf8 => "contents not utf-8", - InvalidUnicodeCodePoint => "invalid Unicode code point", - LoneLeadingSurrogateInHexEscape => "lone leading surrogate in hex escape", - UnexpectedEndOfHexEscape => "unexpected end of hex escape", - } -} - -/// Shortcut function to decode a JSON `&str` into an object -pub fn decode(s: &str) -> DecodeResult { - let json = match from_str(s) { - Ok(x) => x, - Err(e) => return Err(ParseError(e)) - }; - - let mut decoder = Decoder::new(json); - ::Decodable::decode(&mut decoder) -} - -/// Shortcut function to encode a `T` into a JSON `String` -pub fn encode(object: &T) -> Result { - let mut s = String::new(); - { - let mut encoder = Encoder::new(&mut s); - try!(object.encode(&mut encoder)); - } - Ok(s) -} - -impl fmt::Display for ErrorCode { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - error_str(*self).fmt(f) - } -} - -fn io_error_to_error(io: io::Error) -> ParserError { - IoError(io.kind(), io.to_string()) -} - -impl fmt::Display for ParserError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - // FIXME this should be a nicer error - fmt::Debug::fmt(self, f) - } -} - -impl fmt::Display for DecoderError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - // FIXME this should be a nicer error - fmt::Debug::fmt(self, f) - } -} - -impl std::error::Error for DecoderError { - fn description(&self) -> &str { "decoder error" } -} - -impl fmt::Display for EncoderError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - // FIXME this should be a nicer error - fmt::Debug::fmt(self, f) - } -} - -impl std::error::Error for EncoderError { - fn description(&self) -> &str { "encoder error" } -} - -impl From for EncoderError { - fn from(err: fmt::Error) -> EncoderError { EncoderError::FmtError(err) } -} - -pub type EncodeResult = Result<(), EncoderError>; -pub type DecodeResult = Result; - -fn escape_str(wr: &mut fmt::Write, v: &str) -> EncodeResult { - try!(wr.write_str("\"")); - - let mut start = 0; - - for (i, byte) in v.bytes().enumerate() { - let escaped = match byte { - b'"' => "\\\"", - b'\\' => "\\\\", - b'\x00' => "\\u0000", - b'\x01' => "\\u0001", - b'\x02' => "\\u0002", - b'\x03' => "\\u0003", - b'\x04' => "\\u0004", - b'\x05' => "\\u0005", - b'\x06' => "\\u0006", - b'\x07' => "\\u0007", - b'\x08' => "\\b", - b'\t' => "\\t", - b'\n' => "\\n", - b'\x0b' => "\\u000b", - b'\x0c' => "\\f", - b'\r' => "\\r", - b'\x0e' => "\\u000e", - b'\x0f' => "\\u000f", - b'\x10' => "\\u0010", - b'\x11' => "\\u0011", - b'\x12' => "\\u0012", - b'\x13' => "\\u0013", - b'\x14' => "\\u0014", - b'\x15' => "\\u0015", - b'\x16' => "\\u0016", - b'\x17' => "\\u0017", - b'\x18' => "\\u0018", - b'\x19' => "\\u0019", - b'\x1a' => "\\u001a", - b'\x1b' => "\\u001b", - b'\x1c' => "\\u001c", - b'\x1d' => "\\u001d", - b'\x1e' => "\\u001e", - b'\x1f' => "\\u001f", - b'\x7f' => "\\u007f", 
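The `encode`/`decode` shortcuts work for anything implementing the crate's `Encodable`/`Decodable` traits, which the standard containers already do. A small sketch of the round trip and of how a parse failure surfaces (same nightly `rustc_private` assumption):

```rust
#![feature(rustc_private)]
extern crate serialize;

use serialize::json;

fn main() {
    // Standard containers such as Vec are already Encodable/Decodable.
    let encoded = json::encode(&vec![1u32, 2, 3]).unwrap();
    assert_eq!(encoded, "[1,2,3]");

    let decoded: Vec<u32> = json::decode(&encoded).unwrap();
    assert_eq!(decoded, vec![1, 2, 3]);

    // Malformed input comes back as DecoderError::ParseError.
    assert!(json::decode::<Vec<u32>>("[1, 2").is_err());
}
```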
- _ => { continue; } - }; - - if start < i { - try!(wr.write_str(&v[start..i])); - } - - try!(wr.write_str(escaped)); - - start = i + 1; - } - - if start != v.len() { - try!(wr.write_str(&v[start..])); - } - - try!(wr.write_str("\"")); - Ok(()) -} - -fn escape_char(writer: &mut fmt::Write, v: char) -> EncodeResult { - let mut buf = [0; 4]; - let n = v.encode_utf8(&mut buf).unwrap(); - let buf = unsafe { str::from_utf8_unchecked(&buf[..n]) }; - escape_str(writer, buf) -} - -fn spaces(wr: &mut fmt::Write, mut n: usize) -> EncodeResult { - const BUF: &'static str = " "; - - while n >= BUF.len() { - try!(wr.write_str(BUF)); - n -= BUF.len(); - } - - if n > 0 { - try!(wr.write_str(&BUF[..n])); - } - Ok(()) -} - -fn fmt_number_or_null(v: f64) -> string::String { - match v.classify() { - Fp::Nan | Fp::Infinite => string::String::from_str("null"), - _ if v.fract() != 0f64 => v.to_string(), - _ => v.to_string() + ".0", - } -} - -/// A structure for implementing serialization to JSON. -pub struct Encoder<'a> { - writer: &'a mut (fmt::Write+'a), - is_emitting_map_key: bool, -} - -impl<'a> Encoder<'a> { - /// Creates a new JSON encoder whose output will be written to the writer - /// specified. - pub fn new(writer: &'a mut fmt::Write) -> Encoder<'a> { - Encoder { writer: writer, is_emitting_map_key: false, } - } -} - -macro_rules! emit_enquoted_if_mapkey { - ($enc:ident,$e:expr) => { - if $enc.is_emitting_map_key { - try!(write!($enc.writer, "\"{}\"", $e)); - Ok(()) - } else { - try!(write!($enc.writer, "{}", $e)); - Ok(()) - } - } -} - -impl<'a> ::Encoder for Encoder<'a> { - type Error = EncoderError; - - fn emit_nil(&mut self) -> EncodeResult { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - try!(write!(self.writer, "null")); - Ok(()) - } - - fn emit_uint(&mut self, v: usize) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } - fn emit_u64(&mut self, v: u64) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } - fn emit_u32(&mut self, v: u32) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } - fn emit_u16(&mut self, v: u16) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } - fn emit_u8(&mut self, v: u8) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } - - fn emit_int(&mut self, v: isize) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } - fn emit_i64(&mut self, v: i64) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } - fn emit_i32(&mut self, v: i32) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } - fn emit_i16(&mut self, v: i16) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } - fn emit_i8(&mut self, v: i8) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } - - fn emit_bool(&mut self, v: bool) -> EncodeResult { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - if v { - try!(write!(self.writer, "true")); - } else { - try!(write!(self.writer, "false")); - } - Ok(()) - } - - fn emit_f64(&mut self, v: f64) -> EncodeResult { - emit_enquoted_if_mapkey!(self, fmt_number_or_null(v)) - } - fn emit_f32(&mut self, v: f32) -> EncodeResult { - self.emit_f64(v as f64) - } - - fn emit_char(&mut self, v: char) -> EncodeResult { - escape_char(self.writer, v) - } - fn emit_str(&mut self, v: &str) -> EncodeResult { - escape_str(self.writer, v) - } - - fn emit_enum(&mut self, _name: &str, f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a>) -> EncodeResult, - { - f(self) - } - - fn emit_enum_variant(&mut self, - name: &str, - _id: usize, - cnt: usize, - f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a>) -> 
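Two formatting decisions above are easy to miss: `fmt_number_or_null` keeps a trailing `.0` on integral floats and maps NaN or infinity to `null`, and `escape_str` escapes every control character. A sketch of the observable effect through `Json`'s `Display` impl, with the expected strings derived from the code above rather than from separate documentation:

```rust
#![feature(rustc_private)]
extern crate serialize;

use serialize::json::Json;

fn main() {
    // Integral floats keep ".0" so they do not read back as integers.
    assert_eq!(Json::F64(3.0).to_string(), "3.0");
    assert_eq!(Json::F64(3.25).to_string(), "3.25");

    // NaN and infinities have no JSON representation and become null.
    assert_eq!(Json::F64(std::f64::NAN).to_string(), "null");

    // Control characters are escaped; '\t' gets the short form.
    assert_eq!(Json::String("a\tb\u{1}".to_string()).to_string(),
               r#""a\tb\u0001""#);
}
```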
EncodeResult, - { - // enums are encoded as strings or objects - // Bunny => "Bunny" - // Kangaroo(34,"William") => {"variant": "Kangaroo", "fields": [34,"William"]} - if cnt == 0 { - escape_str(self.writer, name) - } else { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - try!(write!(self.writer, "{{\"variant\":")); - try!(escape_str(self.writer, name)); - try!(write!(self.writer, ",\"fields\":[")); - try!(f(self)); - try!(write!(self.writer, "]}}")); - Ok(()) - } - } - - fn emit_enum_variant_arg(&mut self, idx: usize, f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - if idx != 0 { - try!(write!(self.writer, ",")); - } - f(self) - } - - fn emit_enum_struct_variant(&mut self, - name: &str, - id: usize, - cnt: usize, - f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - self.emit_enum_variant(name, id, cnt, f) - } - - fn emit_enum_struct_variant_field(&mut self, - _: &str, - idx: usize, - f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - self.emit_enum_variant_arg(idx, f) - } - - fn emit_struct(&mut self, _: &str, _: usize, f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - try!(write!(self.writer, "{{")); - try!(f(self)); - try!(write!(self.writer, "}}")); - Ok(()) - } - - fn emit_struct_field(&mut self, name: &str, idx: usize, f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - if idx != 0 { try!(write!(self.writer, ",")); } - try!(escape_str(self.writer, name)); - try!(write!(self.writer, ":")); - f(self) - } - - fn emit_tuple(&mut self, len: usize, f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - self.emit_seq(len, f) - } - fn emit_tuple_arg(&mut self, idx: usize, f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - self.emit_seq_elt(idx, f) - } - - fn emit_tuple_struct(&mut self, _name: &str, len: usize, f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - self.emit_seq(len, f) - } - fn emit_tuple_struct_arg(&mut self, idx: usize, f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - self.emit_seq_elt(idx, f) - } - - fn emit_option(&mut self, f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - f(self) - } - fn emit_option_none(&mut self) -> EncodeResult { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - self.emit_nil() - } - fn emit_option_some(&mut self, f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - f(self) - } - - fn emit_seq(&mut self, _len: usize, f: F) -> EncodeResult where - F: FnOnce(&mut 
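The comment above fixes the wire format for enums: a unit variant is a bare string, anything else becomes a `{"variant": ..., "fields": [...]}` object. A hand-written `Encodable` impl makes that concrete; this is only a sketch, since it assumes the generic `Encodable`/`Encoder` traits re-exported at the crate root, whose full signatures are not reproduced in this diff:

```rust
#![feature(rustc_private)]
extern crate serialize;

use serialize::{Encodable, Encoder};
use serialize::json;

enum Animal {
    Bunny,
    Kangaroo(u32, String),
}

impl Encodable for Animal {
    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
        s.emit_enum("Animal", |s| match *self {
            Animal::Bunny => s.emit_enum_variant("Bunny", 0, 0, |_| Ok(())),
            Animal::Kangaroo(age, ref name) => {
                s.emit_enum_variant("Kangaroo", 1, 2, |s| {
                    try!(s.emit_enum_variant_arg(0, |s| age.encode(s)));
                    s.emit_enum_variant_arg(1, |s| name.encode(s))
                })
            }
        })
    }
}

fn main() {
    // Unit variant: bare string.
    assert_eq!(json::encode(&Animal::Bunny).unwrap(), "\"Bunny\"");
    // Data-carrying variant: variant/fields object.
    assert_eq!(json::encode(&Animal::Kangaroo(34, "William".to_string())).unwrap(),
               r#"{"variant":"Kangaroo","fields":[34,"William"]}"#);
}
```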
Encoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - try!(write!(self.writer, "[")); - try!(f(self)); - try!(write!(self.writer, "]")); - Ok(()) - } - - fn emit_seq_elt(&mut self, idx: usize, f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - if idx != 0 { - try!(write!(self.writer, ",")); - } - f(self) - } - - fn emit_map(&mut self, _len: usize, f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - try!(write!(self.writer, "{{")); - try!(f(self)); - try!(write!(self.writer, "}}")); - Ok(()) - } - - fn emit_map_elt_key(&mut self, idx: usize, f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - if idx != 0 { try!(write!(self.writer, ",")) } - self.is_emitting_map_key = true; - try!(f(self)); - self.is_emitting_map_key = false; - Ok(()) - } - - fn emit_map_elt_val(&mut self, _idx: usize, f: F) -> EncodeResult where - F: FnOnce(&mut Encoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - try!(write!(self.writer, ":")); - f(self) - } -} - -/// Another encoder for JSON, but prints out human-readable JSON instead of -/// compact data -pub struct PrettyEncoder<'a> { - writer: &'a mut (fmt::Write+'a), - curr_indent: usize, - indent: usize, - is_emitting_map_key: bool, -} - -impl<'a> PrettyEncoder<'a> { - /// Creates a new encoder whose output will be written to the specified writer - pub fn new(writer: &'a mut fmt::Write) -> PrettyEncoder<'a> { - PrettyEncoder { - writer: writer, - curr_indent: 0, - indent: 2, - is_emitting_map_key: false, - } - } - - /// Set the number of spaces to indent for each level. - /// This is safe to set during encoding. - pub fn set_indent(&mut self, indent: usize) { - // self.indent very well could be 0 so we need to use checked division. 
- let level = self.curr_indent.checked_div(self.indent).unwrap_or(0); - self.indent = indent; - self.curr_indent = level * self.indent; - } -} - -impl<'a> ::Encoder for PrettyEncoder<'a> { - type Error = EncoderError; - - fn emit_nil(&mut self) -> EncodeResult { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - try!(write!(self.writer, "null")); - Ok(()) - } - - fn emit_uint(&mut self, v: usize) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } - fn emit_u64(&mut self, v: u64) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } - fn emit_u32(&mut self, v: u32) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } - fn emit_u16(&mut self, v: u16) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } - fn emit_u8(&mut self, v: u8) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } - - fn emit_int(&mut self, v: isize) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } - fn emit_i64(&mut self, v: i64) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } - fn emit_i32(&mut self, v: i32) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } - fn emit_i16(&mut self, v: i16) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } - fn emit_i8(&mut self, v: i8) -> EncodeResult { emit_enquoted_if_mapkey!(self, v) } - - fn emit_bool(&mut self, v: bool) -> EncodeResult { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - if v { - try!(write!(self.writer, "true")); - } else { - try!(write!(self.writer, "false")); - } - Ok(()) - } - - fn emit_f64(&mut self, v: f64) -> EncodeResult { - emit_enquoted_if_mapkey!(self, fmt_number_or_null(v)) - } - fn emit_f32(&mut self, v: f32) -> EncodeResult { - self.emit_f64(v as f64) - } - - fn emit_char(&mut self, v: char) -> EncodeResult { - escape_char(self.writer, v) - } - fn emit_str(&mut self, v: &str) -> EncodeResult { - escape_str(self.writer, v) - } - - fn emit_enum(&mut self, _name: &str, f: F) -> EncodeResult where - F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, - { - f(self) - } - - fn emit_enum_variant(&mut self, - name: &str, - _id: usize, - cnt: usize, - f: F) - -> EncodeResult where - F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, - { - if cnt == 0 { - escape_str(self.writer, name) - } else { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - try!(write!(self.writer, "{{\n")); - self.curr_indent += self.indent; - try!(spaces(self.writer, self.curr_indent)); - try!(write!(self.writer, "\"variant\": ")); - try!(escape_str(self.writer, name)); - try!(write!(self.writer, ",\n")); - try!(spaces(self.writer, self.curr_indent)); - try!(write!(self.writer, "\"fields\": [\n")); - self.curr_indent += self.indent; - try!(f(self)); - self.curr_indent -= self.indent; - try!(write!(self.writer, "\n")); - try!(spaces(self.writer, self.curr_indent)); - self.curr_indent -= self.indent; - try!(write!(self.writer, "]\n")); - try!(spaces(self.writer, self.curr_indent)); - try!(write!(self.writer, "}}")); - Ok(()) - } - } - - fn emit_enum_variant_arg(&mut self, idx: usize, f: F) -> EncodeResult where - F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - if idx != 0 { - try!(write!(self.writer, ",\n")); - } - try!(spaces(self.writer, self.curr_indent)); - f(self) - } - - fn emit_enum_struct_variant(&mut self, - name: &str, - id: usize, - cnt: usize, - f: F) -> EncodeResult where - F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return 
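`PrettyEncoder` writes through any `fmt::Write`, so a plain `String` works as the sink, and `set_indent` widens the default two-space indentation. A sketch under the same nightly `rustc_private` assumption:

```rust
#![feature(rustc_private)]
extern crate serialize;

use serialize::Encodable;
use serialize::json::PrettyEncoder;

fn main() {
    let value = vec![vec![1u32, 2], vec![3]];

    let mut out = String::new();
    {
        let mut encoder = PrettyEncoder::new(&mut out);
        encoder.set_indent(4); // four spaces per nesting level
        value.encode(&mut encoder).unwrap();
    }
    println!("{}", out);
}
```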
Err(EncoderError::BadHashmapKey); } - self.emit_enum_variant(name, id, cnt, f) - } - - fn emit_enum_struct_variant_field(&mut self, - _: &str, - idx: usize, - f: F) -> EncodeResult where - F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - self.emit_enum_variant_arg(idx, f) - } - - - fn emit_struct(&mut self, _: &str, len: usize, f: F) -> EncodeResult where - F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - if len == 0 { - try!(write!(self.writer, "{{}}")); - } else { - try!(write!(self.writer, "{{")); - self.curr_indent += self.indent; - try!(f(self)); - self.curr_indent -= self.indent; - try!(write!(self.writer, "\n")); - try!(spaces(self.writer, self.curr_indent)); - try!(write!(self.writer, "}}")); - } - Ok(()) - } - - fn emit_struct_field(&mut self, name: &str, idx: usize, f: F) -> EncodeResult where - F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - if idx == 0 { - try!(write!(self.writer, "\n")); - } else { - try!(write!(self.writer, ",\n")); - } - try!(spaces(self.writer, self.curr_indent)); - try!(escape_str(self.writer, name)); - try!(write!(self.writer, ": ")); - f(self) - } - - fn emit_tuple(&mut self, len: usize, f: F) -> EncodeResult where - F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - self.emit_seq(len, f) - } - fn emit_tuple_arg(&mut self, idx: usize, f: F) -> EncodeResult where - F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - self.emit_seq_elt(idx, f) - } - - fn emit_tuple_struct(&mut self, _: &str, len: usize, f: F) -> EncodeResult where - F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - self.emit_seq(len, f) - } - fn emit_tuple_struct_arg(&mut self, idx: usize, f: F) -> EncodeResult where - F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - self.emit_seq_elt(idx, f) - } - - fn emit_option(&mut self, f: F) -> EncodeResult where - F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - f(self) - } - fn emit_option_none(&mut self) -> EncodeResult { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - self.emit_nil() - } - fn emit_option_some(&mut self, f: F) -> EncodeResult where - F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - f(self) - } - - fn emit_seq(&mut self, len: usize, f: F) -> EncodeResult where - F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - if len == 0 { - try!(write!(self.writer, "[]")); - } else { - try!(write!(self.writer, "[")); - self.curr_indent += self.indent; - try!(f(self)); - self.curr_indent -= self.indent; - try!(write!(self.writer, "\n")); - try!(spaces(self.writer, self.curr_indent)); - try!(write!(self.writer, "]")); - } - Ok(()) - } - - fn emit_seq_elt(&mut self, idx: usize, f: F) -> EncodeResult where - F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return 
Err(EncoderError::BadHashmapKey); } - if idx == 0 { - try!(write!(self.writer, "\n")); - } else { - try!(write!(self.writer, ",\n")); - } - try!(spaces(self.writer, self.curr_indent)); - f(self) - } - - fn emit_map(&mut self, len: usize, f: F) -> EncodeResult where - F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - if len == 0 { - try!(write!(self.writer, "{{}}")); - } else { - try!(write!(self.writer, "{{")); - self.curr_indent += self.indent; - try!(f(self)); - self.curr_indent -= self.indent; - try!(write!(self.writer, "\n")); - try!(spaces(self.writer, self.curr_indent)); - try!(write!(self.writer, "}}")); - } - Ok(()) - } - - fn emit_map_elt_key(&mut self, idx: usize, f: F) -> EncodeResult where - F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - if idx == 0 { - try!(write!(self.writer, "\n")); - } else { - try!(write!(self.writer, ",\n")); - } - try!(spaces(self.writer, self.curr_indent)); - self.is_emitting_map_key = true; - try!(f(self)); - self.is_emitting_map_key = false; - Ok(()) - } - - fn emit_map_elt_val(&mut self, _idx: usize, f: F) -> EncodeResult where - F: FnOnce(&mut PrettyEncoder<'a>) -> EncodeResult, - { - if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } - try!(write!(self.writer, ": ")); - f(self) - } -} - -impl Encodable for Json { - fn encode(&self, e: &mut E) -> Result<(), E::Error> { - match *self { - Json::I64(v) => v.encode(e), - Json::U64(v) => v.encode(e), - Json::F64(v) => v.encode(e), - Json::String(ref v) => v.encode(e), - Json::Boolean(v) => v.encode(e), - Json::Array(ref v) => v.encode(e), - Json::Object(ref v) => v.encode(e), - Json::Null => e.emit_nil(), - } - } -} - -/// Create an `AsJson` wrapper which can be used to print a value as JSON -/// on-the-fly via `write!` -pub fn as_json(t: &T) -> AsJson { - AsJson { inner: t } -} - -/// Create an `AsPrettyJson` wrapper which can be used to print a value as JSON -/// on-the-fly via `write!` -pub fn as_pretty_json(t: &T) -> AsPrettyJson { - AsPrettyJson { inner: t, indent: None } -} - -impl Json { - /// Borrow this json object as a pretty object to generate a pretty - /// representation for it via `Display`. - pub fn pretty(&self) -> PrettyJson { - PrettyJson { inner: self } - } - - /// If the Json value is an Object, returns the value associated with the provided key. - /// Otherwise, returns None. - pub fn find<'a>(&'a self, key: &str) -> Option<&'a Json>{ - match self { - &Json::Object(ref map) => map.get(key), - _ => None - } - } - - /// Attempts to get a nested Json Object for each key in `keys`. - /// If any key is found not to exist, find_path will return None. - /// Otherwise, it will return the Json value associated with the final key. - pub fn find_path<'a>(&'a self, keys: &[&str]) -> Option<&'a Json>{ - let mut target = self; - for key in keys { - match target.find(*key) { - Some(t) => { target = t; }, - None => return None - } - } - Some(target) - } - - /// If the Json value is an Object, performs a depth-first search until - /// a value associated with the provided key is found. If no value is found - /// or the Json value is not an Object, returns None. 
- pub fn search<'a>(&'a self, key: &str) -> Option<&'a Json> { - match self { - &Json::Object(ref map) => { - match map.get(key) { - Some(json_value) => Some(json_value), - None => { - for (_, v) in map { - match v.search(key) { - x if x.is_some() => return x, - _ => () - } - } - None - } - } - }, - _ => None - } - } - - /// Returns true if the Json value is an Object. Returns false otherwise. - pub fn is_object<'a>(&'a self) -> bool { - self.as_object().is_some() - } - - /// If the Json value is an Object, returns the associated BTreeMap. - /// Returns None otherwise. - pub fn as_object<'a>(&'a self) -> Option<&'a Object> { - match self { - &Json::Object(ref map) => Some(map), - _ => None - } - } - - /// Returns true if the Json value is an Array. Returns false otherwise. - pub fn is_array<'a>(&'a self) -> bool { - self.as_array().is_some() - } - - /// If the Json value is an Array, returns the associated vector. - /// Returns None otherwise. - pub fn as_array<'a>(&'a self) -> Option<&'a Array> { - match self { - &Json::Array(ref array) => Some(&*array), - _ => None - } - } - - /// Returns true if the Json value is a String. Returns false otherwise. - pub fn is_string<'a>(&'a self) -> bool { - self.as_string().is_some() - } - - /// If the Json value is a String, returns the associated str. - /// Returns None otherwise. - pub fn as_string<'a>(&'a self) -> Option<&'a str> { - match *self { - Json::String(ref s) => Some(&s[..]), - _ => None - } - } - - /// Returns true if the Json value is a Number. Returns false otherwise. - pub fn is_number(&self) -> bool { - match *self { - Json::I64(_) | Json::U64(_) | Json::F64(_) => true, - _ => false, - } - } - - /// Returns true if the Json value is a i64. Returns false otherwise. - pub fn is_i64(&self) -> bool { - match *self { - Json::I64(_) => true, - _ => false, - } - } - - /// Returns true if the Json value is a u64. Returns false otherwise. - pub fn is_u64(&self) -> bool { - match *self { - Json::U64(_) => true, - _ => false, - } - } - - /// Returns true if the Json value is a f64. Returns false otherwise. - pub fn is_f64(&self) -> bool { - match *self { - Json::F64(_) => true, - _ => false, - } - } - - /// If the Json value is a number, return or cast it to a i64. - /// Returns None otherwise. - pub fn as_i64(&self) -> Option { - match *self { - Json::I64(n) => Some(n), - Json::U64(n) => Some(n as i64), - _ => None - } - } - - /// If the Json value is a number, return or cast it to a u64. - /// Returns None otherwise. - pub fn as_u64(&self) -> Option { - match *self { - Json::I64(n) => Some(n as u64), - Json::U64(n) => Some(n), - _ => None - } - } - - /// If the Json value is a number, return or cast it to a f64. - /// Returns None otherwise. - pub fn as_f64(&self) -> Option { - match *self { - Json::I64(n) => Some(n as f64), - Json::U64(n) => Some(n as f64), - Json::F64(n) => Some(n), - _ => None - } - } - - /// Returns true if the Json value is a Boolean. Returns false otherwise. - pub fn is_boolean(&self) -> bool { - self.as_boolean().is_some() - } - - /// If the Json value is a Boolean, returns the associated bool. - /// Returns None otherwise. - pub fn as_boolean(&self) -> Option { - match self { - &Json::Boolean(b) => Some(b), - _ => None - } - } - - /// Returns true if the Json value is a Null. Returns false otherwise. - pub fn is_null(&self) -> bool { - self.as_null().is_some() - } - - /// If the Json value is a Null, returns (). - /// Returns None otherwise. 
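Together, `find`, `find_path`, `search` and the `as_*` accessors allow poking at a document without decoding it into a Rust type. A short sketch (nightly, `rustc_private`), using `from_str`, which appears later in this module:

```rust
#![feature(rustc_private)]
extern crate serialize;

use serialize::json::{self, Json};

fn main() {
    let data: Json = json::from_str(
        r#"{"FirstName": "John", "Age": 43, "Address": {"City": "London"}}"#
    ).unwrap();

    // Walk a fixed path of keys.
    let city = data.find_path(&["Address", "City"]).and_then(|j| j.as_string());
    assert_eq!(city, Some("London"));

    // The as_* accessors return None on a type mismatch instead of panicking.
    assert_eq!(data.find("Age").and_then(|j| j.as_u64()), Some(43));
    assert_eq!(data.find("Age").and_then(|j| j.as_string()), None);

    // Depth-first search by key, anywhere in the tree.
    assert!(data.search("City").is_some());
}
```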
- pub fn as_null(&self) -> Option<()> { - match self { - &Json::Null => Some(()), - _ => None - } - } -} - -impl<'a> Index<&'a str> for Json { - type Output = Json; - - fn index(&self, idx: &'a str) -> &Json { - self.find(idx).unwrap() - } -} - -impl Index for Json { - type Output = Json; - - fn index<'a>(&'a self, idx: usize) -> &'a Json { - match self { - &Json::Array(ref v) => &v[idx], - _ => panic!("can only index Json with usize if it is an array") - } - } -} - -/// The output of the streaming parser. -#[derive(PartialEq, Clone, Debug)] -pub enum JsonEvent { - ObjectStart, - ObjectEnd, - ArrayStart, - ArrayEnd, - BooleanValue(bool), - I64Value(i64), - U64Value(u64), - F64Value(f64), - StringValue(string::String), - NullValue, - Error(ParserError), -} - -#[derive(PartialEq, Debug)] -enum ParserState { - // Parse a value in an array, true means first element. - ParseArray(bool), - // Parse ',' or ']' after an element in an array. - ParseArrayComma, - // Parse a key:value in an object, true means first element. - ParseObject(bool), - // Parse ',' or ']' after an element in an object. - ParseObjectComma, - // Initial state. - ParseStart, - // Expecting the stream to end. - ParseBeforeFinish, - // Parsing can't continue. - ParseFinished, -} - -/// A Stack represents the current position of the parser in the logical -/// structure of the JSON stream. -/// For example foo.bar[3].x -pub struct Stack { - stack: Vec, - str_buffer: Vec, -} - -/// StackElements compose a Stack. -/// For example, StackElement::Key("foo"), StackElement::Key("bar"), -/// StackElement::Index(3) and StackElement::Key("x") are the -/// StackElements compositing the stack that represents foo.bar[3].x -#[derive(PartialEq, Clone, Debug)] -pub enum StackElement<'l> { - Index(u32), - Key(&'l str), -} - -// Internally, Key elements are stored as indices in a buffer to avoid -// allocating a string for every member of an object. -#[derive(PartialEq, Clone, Debug)] -enum InternalStackElement { - InternalIndex(u32), - InternalKey(u16, u16), // start, size -} - -impl Stack { - pub fn new() -> Stack { - Stack { stack: Vec::new(), str_buffer: Vec::new() } - } - - /// Returns The number of elements in the Stack. - pub fn len(&self) -> usize { self.stack.len() } - - /// Returns true if the stack is empty. - pub fn is_empty(&self) -> bool { self.stack.is_empty() } - - /// Provides access to the StackElement at a given index. - /// lower indices are at the bottom of the stack while higher indices are - /// at the top. - pub fn get<'l>(&'l self, idx: usize) -> StackElement<'l> { - match self.stack[idx] { - InternalIndex(i) => StackElement::Index(i), - InternalKey(start, size) => { - StackElement::Key(str::from_utf8( - &self.str_buffer[start as usize .. start as usize + size as usize]) - .unwrap()) - } - } - } - - /// Compares this stack with an array of StackElements. - pub fn is_equal_to(&self, rhs: &[StackElement]) -> bool { - if self.stack.len() != rhs.len() { return false; } - for i in 0..rhs.len() { - if self.get(i) != rhs[i] { return false; } - } - return true; - } - - /// Returns true if the bottom-most elements of this stack are the same as - /// the ones passed as parameter. - pub fn starts_with(&self, rhs: &[StackElement]) -> bool { - if self.stack.len() < rhs.len() { return false; } - for i in 0..rhs.len() { - if self.get(i) != rhs[i] { return false; } - } - return true; - } - - /// Returns true if the top-most elements of this stack are the same as - /// the ones passed as parameter. 
- pub fn ends_with(&self, rhs: &[StackElement]) -> bool { - if self.stack.len() < rhs.len() { return false; } - let offset = self.stack.len() - rhs.len(); - for i in 0..rhs.len() { - if self.get(i + offset) != rhs[i] { return false; } - } - return true; - } - - /// Returns the top-most element (if any). - pub fn top<'l>(&'l self) -> Option> { - return match self.stack.last() { - None => None, - Some(&InternalIndex(i)) => Some(StackElement::Index(i)), - Some(&InternalKey(start, size)) => { - Some(StackElement::Key(str::from_utf8( - &self.str_buffer[start as usize .. (start+size) as usize] - ).unwrap())) - } - } - } - - // Used by Parser to insert StackElement::Key elements at the top of the stack. - fn push_key(&mut self, key: string::String) { - self.stack.push(InternalKey(self.str_buffer.len() as u16, key.len() as u16)); - for c in key.as_bytes() { - self.str_buffer.push(*c); - } - } - - // Used by Parser to insert StackElement::Index elements at the top of the stack. - fn push_index(&mut self, index: u32) { - self.stack.push(InternalIndex(index)); - } - - // Used by Parser to remove the top-most element of the stack. - fn pop(&mut self) { - assert!(!self.is_empty()); - match *self.stack.last().unwrap() { - InternalKey(_, sz) => { - let new_size = self.str_buffer.len() - sz as usize; - self.str_buffer.truncate(new_size); - } - InternalIndex(_) => {} - } - self.stack.pop(); - } - - // Used by Parser to test whether the top-most element is an index. - fn last_is_index(&self) -> bool { - if self.is_empty() { return false; } - return match *self.stack.last().unwrap() { - InternalIndex(_) => true, - _ => false, - } - } - - // Used by Parser to increment the index of the top-most element. - fn bump_index(&mut self) { - let len = self.stack.len(); - let idx = match *self.stack.last().unwrap() { - InternalIndex(i) => { i + 1 } - _ => { panic!(); } - }; - self.stack[len - 1] = InternalIndex(idx); - } -} - -/// A streaming JSON parser implemented as an iterator of JsonEvent, consuming -/// an iterator of char. -pub struct Parser { - rdr: T, - ch: Option, - line: usize, - col: usize, - // We maintain a stack representing where we are in the logical structure - // of the JSON stream. - stack: Stack, - // A state machine is kept to make it possible to interrupt and resume parsing. - state: ParserState, -} - -impl> Iterator for Parser { - type Item = JsonEvent; - - fn next(&mut self) -> Option { - if self.state == ParseFinished { - return None; - } - - if self.state == ParseBeforeFinish { - self.parse_whitespace(); - // Make sure there is no trailing characters. - if self.eof() { - self.state = ParseFinished; - return None; - } else { - return Some(self.error_event(TrailingCharacters)); - } - } - - return Some(self.parse()); - } -} - -impl> Parser { - /// Creates the JSON parser. - pub fn new(rdr: T) -> Parser { - let mut p = Parser { - rdr: rdr, - ch: Some('\x00'), - line: 1, - col: 0, - stack: Stack::new(), - state: ParseStart, - }; - p.bump(); - return p; - } - - /// Provides access to the current position in the logical structure of the - /// JSON stream. 
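Because `Parser` is itself an `Iterator` of `JsonEvent`s over any `char` iterator, a document can be scanned without ever building a `Json` value. A minimal sketch (nightly, `rustc_private`):

```rust
#![feature(rustc_private)]
extern crate serialize;

use serialize::json::{JsonEvent, Parser};

fn main() {
    let src = r#"{"numbers": [1, 2], "ok": true}"#;

    // Object keys go onto the parser's internal stack; only values and
    // structural markers come out as events.
    for event in Parser::new(src.chars()) {
        match event {
            JsonEvent::U64Value(n) => println!("number: {}", n),
            JsonEvent::BooleanValue(b) => println!("bool: {}", b),
            JsonEvent::Error(e) => panic!("parse error: {:?}", e),
            other => println!("structure: {:?}", other),
        }
    }
}
```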
- pub fn stack<'l>(&'l self) -> &'l Stack { - return &self.stack; - } - - fn eof(&self) -> bool { self.ch.is_none() } - fn ch_or_null(&self) -> char { self.ch.unwrap_or('\x00') } - fn bump(&mut self) { - self.ch = self.rdr.next(); - - if self.ch_is('\n') { - self.line += 1; - self.col = 1; - } else { - self.col += 1; - } - } - - fn next_char(&mut self) -> Option { - self.bump(); - self.ch - } - fn ch_is(&self, c: char) -> bool { - self.ch == Some(c) - } - - fn error(&self, reason: ErrorCode) -> Result { - Err(SyntaxError(reason, self.line, self.col)) - } - - fn parse_whitespace(&mut self) { - while self.ch_is(' ') || - self.ch_is('\n') || - self.ch_is('\t') || - self.ch_is('\r') { self.bump(); } - } - - fn parse_number(&mut self) -> JsonEvent { - let mut neg = false; - - if self.ch_is('-') { - self.bump(); - neg = true; - } - - let res = match self.parse_u64() { - Ok(res) => res, - Err(e) => { return Error(e); } - }; - - if self.ch_is('.') || self.ch_is('e') || self.ch_is('E') { - let mut res = res as f64; - - if self.ch_is('.') { - res = match self.parse_decimal(res) { - Ok(res) => res, - Err(e) => { return Error(e); } - }; - } - - if self.ch_is('e') || self.ch_is('E') { - res = match self.parse_exponent(res) { - Ok(res) => res, - Err(e) => { return Error(e); } - }; - } - - if neg { - res *= -1.0; - } - - F64Value(res) - } else { - if neg { - let res = (res as i64).wrapping_neg(); - - // Make sure we didn't underflow. - if res > 0 { - Error(SyntaxError(InvalidNumber, self.line, self.col)) - } else { - I64Value(res) - } - } else { - U64Value(res) - } - } - } - - #[allow(deprecated)] // possible resolve bug is mapping these to traits - fn parse_u64(&mut self) -> Result { - let mut accum = 0u64; - let last_accum = 0; // necessary to detect overflow. - - match self.ch_or_null() { - '0' => { - self.bump(); - - // A leading '0' must be the only digit before the decimal point. - match self.ch_or_null() { - '0' ... '9' => return self.error(InvalidNumber), - _ => () - } - }, - '1' ... '9' => { - while !self.eof() { - match self.ch_or_null() { - c @ '0' ... '9' => { - accum = accum.wrapping_mul(10); - accum = accum.wrapping_add((c as u64) - ('0' as u64)); - - // Detect overflow by comparing to the last value. - if accum <= last_accum { return self.error(InvalidNumber); } - - self.bump(); - } - _ => break, - } - } - } - _ => return self.error(InvalidNumber), - } - - Ok(accum) - } - - fn parse_decimal(&mut self, mut res: f64) -> Result { - self.bump(); - - // Make sure a digit follows the decimal place. - match self.ch_or_null() { - '0' ... '9' => (), - _ => return self.error(InvalidNumber) - } - - let mut dec = 1.0; - while !self.eof() { - match self.ch_or_null() { - c @ '0' ... '9' => { - dec /= 10.0; - res += (((c as isize) - ('0' as isize)) as f64) * dec; - self.bump(); - } - _ => break, - } - } - - Ok(res) - } - - fn parse_exponent(&mut self, mut res: f64) -> Result { - self.bump(); - - let mut exp = 0; - let mut neg_exp = false; - - if self.ch_is('+') { - self.bump(); - } else if self.ch_is('-') { - self.bump(); - neg_exp = true; - } - - // Make sure a digit follows the exponent place. - match self.ch_or_null() { - '0' ... '9' => (), - _ => return self.error(InvalidNumber) - } - while !self.eof() { - match self.ch_or_null() { - c @ '0' ... 
'9' => { - exp *= 10; - exp += (c as usize) - ('0' as usize); - - self.bump(); - } - _ => break - } - } - - let exp = 10_f64.powi(exp as i32); - if neg_exp { - res /= exp; - } else { - res *= exp; - } - - Ok(res) - } - - fn decode_hex_escape(&mut self) -> Result { - let mut i = 0; - let mut n = 0; - while i < 4 && !self.eof() { - self.bump(); - n = match self.ch_or_null() { - c @ '0' ... '9' => n * 16 + ((c as u16) - ('0' as u16)), - 'a' | 'A' => n * 16 + 10, - 'b' | 'B' => n * 16 + 11, - 'c' | 'C' => n * 16 + 12, - 'd' | 'D' => n * 16 + 13, - 'e' | 'E' => n * 16 + 14, - 'f' | 'F' => n * 16 + 15, - _ => return self.error(InvalidEscape) - }; - - i += 1; - } - - // Error out if we didn't parse 4 digits. - if i != 4 { - return self.error(InvalidEscape); - } - - Ok(n) - } - - fn parse_str(&mut self) -> Result { - let mut escape = false; - let mut res = string::String::new(); - - loop { - self.bump(); - if self.eof() { - return self.error(EOFWhileParsingString); - } - - if escape { - match self.ch_or_null() { - '"' => res.push('"'), - '\\' => res.push('\\'), - '/' => res.push('/'), - 'b' => res.push('\x08'), - 'f' => res.push('\x0c'), - 'n' => res.push('\n'), - 'r' => res.push('\r'), - 't' => res.push('\t'), - 'u' => match try!(self.decode_hex_escape()) { - 0xDC00 ... 0xDFFF => { - return self.error(LoneLeadingSurrogateInHexEscape) - } - - // Non-BMP characters are encoded as a sequence of - // two hex escapes, representing UTF-16 surrogates. - n1 @ 0xD800 ... 0xDBFF => { - match (self.next_char(), self.next_char()) { - (Some('\\'), Some('u')) => (), - _ => return self.error(UnexpectedEndOfHexEscape), - } - - let buf = [n1, try!(self.decode_hex_escape())]; - match unicode_str::utf16_items(&buf).next() { - Some(Utf16Item::ScalarValue(c)) => res.push(c), - _ => return self.error(LoneLeadingSurrogateInHexEscape), - } - } - - n => match char::from_u32(n as u32) { - Some(c) => res.push(c), - None => return self.error(InvalidUnicodeCodePoint), - }, - }, - _ => return self.error(InvalidEscape), - } - escape = false; - } else if self.ch_is('\\') { - escape = true; - } else { - match self.ch { - Some('"') => { - self.bump(); - return Ok(res); - }, - Some(c) => res.push(c), - None => unreachable!() - } - } - } - } - - // Invoked at each iteration, consumes the stream until it has enough - // information to return a JsonEvent. - // Manages an internal state so that parsing can be interrupted and resumed. - // Also keeps track of the position in the logical structure of the json - // stream isize the form of a stack that can be queried by the user using the - // stack() method. - fn parse(&mut self) -> JsonEvent { - loop { - // The only paths where the loop can spin a new iteration - // are in the cases ParseArrayComma and ParseObjectComma if ',' - // is parsed. In these cases the state is set to (respectively) - // ParseArray(false) and ParseObject(false), which always return, - // so there is no risk of getting stuck in an infinite loop. - // All other paths return before the end of the loop's iteration. 
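`parse_str` and `decode_hex_escape` above accept non-BMP characters only as UTF-16 surrogate pairs and reject a lone leading surrogate. A small sketch of that behaviour as observed through `from_str`, with the expectations derived from the code rather than from separate documentation:

```rust
#![feature(rustc_private)]
extern crate serialize;

use serialize::json::{self, Json};

fn main() {
    // A surrogate pair in \u escapes is recombined into one scalar value.
    let parsed = json::from_str(r#""\ud83d\ude00""#).unwrap();
    assert_eq!(parsed, Json::String("\u{1F600}".to_string()));

    // A lone leading surrogate is a parse error.
    assert!(json::from_str(r#""\ud83d""#).is_err());
}
```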
- self.parse_whitespace(); - - match self.state { - ParseStart => { - return self.parse_start(); - } - ParseArray(first) => { - return self.parse_array(first); - } - ParseArrayComma => { - match self.parse_array_comma_or_end() { - Some(evt) => { return evt; } - None => {} - } - } - ParseObject(first) => { - return self.parse_object(first); - } - ParseObjectComma => { - self.stack.pop(); - if self.ch_is(',') { - self.state = ParseObject(false); - self.bump(); - } else { - return self.parse_object_end(); - } - } - _ => { - return self.error_event(InvalidSyntax); - } - } - } - } - - fn parse_start(&mut self) -> JsonEvent { - let val = self.parse_value(); - self.state = match val { - Error(_) => ParseFinished, - ArrayStart => ParseArray(true), - ObjectStart => ParseObject(true), - _ => ParseBeforeFinish, - }; - return val; - } - - fn parse_array(&mut self, first: bool) -> JsonEvent { - if self.ch_is(']') { - if !first { - self.error_event(InvalidSyntax) - } else { - self.state = if self.stack.is_empty() { - ParseBeforeFinish - } else if self.stack.last_is_index() { - ParseArrayComma - } else { - ParseObjectComma - }; - self.bump(); - ArrayEnd - } - } else { - if first { - self.stack.push_index(0); - } - let val = self.parse_value(); - self.state = match val { - Error(_) => ParseFinished, - ArrayStart => ParseArray(true), - ObjectStart => ParseObject(true), - _ => ParseArrayComma, - }; - val - } - } - - fn parse_array_comma_or_end(&mut self) -> Option { - if self.ch_is(',') { - self.stack.bump_index(); - self.state = ParseArray(false); - self.bump(); - None - } else if self.ch_is(']') { - self.stack.pop(); - self.state = if self.stack.is_empty() { - ParseBeforeFinish - } else if self.stack.last_is_index() { - ParseArrayComma - } else { - ParseObjectComma - }; - self.bump(); - Some(ArrayEnd) - } else if self.eof() { - Some(self.error_event(EOFWhileParsingArray)) - } else { - Some(self.error_event(InvalidSyntax)) - } - } - - fn parse_object(&mut self, first: bool) -> JsonEvent { - if self.ch_is('}') { - if !first { - if self.stack.is_empty() { - return self.error_event(TrailingComma); - } else { - self.stack.pop(); - } - } - self.state = if self.stack.is_empty() { - ParseBeforeFinish - } else if self.stack.last_is_index() { - ParseArrayComma - } else { - ParseObjectComma - }; - self.bump(); - return ObjectEnd; - } - if self.eof() { - return self.error_event(EOFWhileParsingObject); - } - if !self.ch_is('"') { - return self.error_event(KeyMustBeAString); - } - let s = match self.parse_str() { - Ok(s) => s, - Err(e) => { - self.state = ParseFinished; - return Error(e); - } - }; - self.parse_whitespace(); - if self.eof() { - return self.error_event(EOFWhileParsingObject); - } else if self.ch_or_null() != ':' { - return self.error_event(ExpectedColon); - } - self.stack.push_key(s); - self.bump(); - self.parse_whitespace(); - - let val = self.parse_value(); - - self.state = match val { - Error(_) => ParseFinished, - ArrayStart => ParseArray(true), - ObjectStart => ParseObject(true), - _ => ParseObjectComma, - }; - return val; - } - - fn parse_object_end(&mut self) -> JsonEvent { - if self.ch_is('}') { - self.state = if self.stack.is_empty() { - ParseBeforeFinish - } else if self.stack.last_is_index() { - ParseArrayComma - } else { - ParseObjectComma - }; - self.bump(); - ObjectEnd - } else if self.eof() { - self.error_event(EOFWhileParsingObject) - } else { - self.error_event(InvalidSyntax) - } - } - - fn parse_value(&mut self) -> JsonEvent { - if self.eof() { return 
self.error_event(EOFWhileParsingValue); } - match self.ch_or_null() { - 'n' => { self.parse_ident("ull", NullValue) } - 't' => { self.parse_ident("rue", BooleanValue(true)) } - 'f' => { self.parse_ident("alse", BooleanValue(false)) } - '0' ... '9' | '-' => self.parse_number(), - '"' => match self.parse_str() { - Ok(s) => StringValue(s), - Err(e) => Error(e), - }, - '[' => { - self.bump(); - ArrayStart - } - '{' => { - self.bump(); - ObjectStart - } - _ => { self.error_event(InvalidSyntax) } - } - } - - fn parse_ident(&mut self, ident: &str, value: JsonEvent) -> JsonEvent { - if ident.chars().all(|c| Some(c) == self.next_char()) { - self.bump(); - value - } else { - Error(SyntaxError(InvalidSyntax, self.line, self.col)) - } - } - - fn error_event(&mut self, reason: ErrorCode) -> JsonEvent { - self.state = ParseFinished; - Error(SyntaxError(reason, self.line, self.col)) - } -} - -/// A Builder consumes a json::Parser to create a generic Json structure. -pub struct Builder { - parser: Parser, - token: Option, -} - -impl> Builder { - /// Create a JSON Builder. - pub fn new(src: T) -> Builder { - Builder { parser: Parser::new(src), token: None, } - } - - // Decode a Json value from a Parser. - pub fn build(&mut self) -> Result { - self.bump(); - let result = self.build_value(); - self.bump(); - match self.token { - None => {} - Some(Error(ref e)) => { return Err(e.clone()); } - ref tok => { panic!("unexpected token {:?}", tok.clone()); } - } - result - } - - fn bump(&mut self) { - self.token = self.parser.next(); - } - - fn build_value(&mut self) -> Result { - return match self.token { - Some(NullValue) => Ok(Json::Null), - Some(I64Value(n)) => Ok(Json::I64(n)), - Some(U64Value(n)) => Ok(Json::U64(n)), - Some(F64Value(n)) => Ok(Json::F64(n)), - Some(BooleanValue(b)) => Ok(Json::Boolean(b)), - Some(StringValue(ref mut s)) => { - let mut temp = string::String::new(); - swap(s, &mut temp); - Ok(Json::String(temp)) - } - Some(Error(ref e)) => Err(e.clone()), - Some(ArrayStart) => self.build_array(), - Some(ObjectStart) => self.build_object(), - Some(ObjectEnd) => self.parser.error(InvalidSyntax), - Some(ArrayEnd) => self.parser.error(InvalidSyntax), - None => self.parser.error(EOFWhileParsingValue), - } - } - - fn build_array(&mut self) -> Result { - self.bump(); - let mut values = Vec::new(); - - loop { - if self.token == Some(ArrayEnd) { - return Ok(Json::Array(values.into_iter().collect())); - } - match self.build_value() { - Ok(v) => values.push(v), - Err(e) => { return Err(e) } - } - self.bump(); - } - } - - fn build_object(&mut self) -> Result { - self.bump(); - - let mut values = BTreeMap::new(); - - loop { - match self.token { - Some(ObjectEnd) => { return Ok(Json::Object(values)); } - Some(Error(ref e)) => { return Err(e.clone()); } - None => { break; } - _ => {} - } - let key = match self.parser.stack().top() { - Some(StackElement::Key(k)) => { k.to_string() } - _ => { panic!("invalid state"); } - }; - match self.build_value() { - Ok(value) => { values.insert(key, value); } - Err(e) => { return Err(e); } - } - self.bump(); - } - return self.parser.error(EOFWhileParsingObject); - } -} - -/// Decodes a json value from an `&mut io::Read` -pub fn from_reader(rdr: &mut Read) -> Result { - let mut contents = Vec::new(); - match rdr.read_to_end(&mut contents) { - Ok(c) => c, - Err(e) => return Err(io_error_to_error(e)) - }; - let s = match str::from_utf8(&contents).ok() { - Some(s) => s, - _ => return Err(SyntaxError(NotUtf8, 0, 0)) - }; - let mut builder = Builder::new(s.chars()); - 
builder.build() -} - -/// Decodes a json value from a string -pub fn from_str(s: &str) -> Result { - let mut builder = Builder::new(s.chars()); - builder.build() -} - -/// A structure to decode JSON to values in rust. -pub struct Decoder { - stack: Vec, -} - -impl Decoder { - /// Creates a new decoder instance for decoding the specified JSON value. - pub fn new(json: Json) -> Decoder { - Decoder { stack: vec![json] } - } -} - -impl Decoder { - fn pop(&mut self) -> Json { - self.stack.pop().unwrap() - } -} - -macro_rules! expect { - ($e:expr, Null) => ({ - match $e { - Json::Null => Ok(()), - other => Err(ExpectedError("Null".to_string(), - format!("{}", other))) - } - }); - ($e:expr, $t:ident) => ({ - match $e { - Json::$t(v) => Ok(v), - other => { - Err(ExpectedError(stringify!($t).to_string(), - format!("{}", other))) - } - } - }) -} - -macro_rules! read_primitive { - ($name:ident, $ty:ty) => { - fn $name(&mut self) -> DecodeResult<$ty> { - match self.pop() { - Json::I64(f) => Ok(f as $ty), - Json::U64(f) => Ok(f as $ty), - Json::F64(f) => Err(ExpectedError("Integer".to_string(), format!("{}", f))), - // re: #12967.. a type w/ numeric keys (ie HashMap etc) - // is going to have a string here, as per JSON spec. - Json::String(s) => match s.parse().ok() { - Some(f) => Ok(f), - None => Err(ExpectedError("Number".to_string(), s)), - }, - value => Err(ExpectedError("Number".to_string(), format!("{}", value))), - } - } - } -} - -impl ::Decoder for Decoder { - type Error = DecoderError; - - fn read_nil(&mut self) -> DecodeResult<()> { - expect!(self.pop(), Null) - } - - read_primitive! { read_uint, usize } - read_primitive! { read_u8, u8 } - read_primitive! { read_u16, u16 } - read_primitive! { read_u32, u32 } - read_primitive! { read_u64, u64 } - read_primitive! { read_int, isize } - read_primitive! { read_i8, i8 } - read_primitive! { read_i16, i16 } - read_primitive! { read_i32, i32 } - read_primitive! { read_i64, i64 } - - fn read_f32(&mut self) -> DecodeResult { self.read_f64().map(|x| x as f32) } - - fn read_f64(&mut self) -> DecodeResult { - match self.pop() { - Json::I64(f) => Ok(f as f64), - Json::U64(f) => Ok(f as f64), - Json::F64(f) => Ok(f), - Json::String(s) => { - // re: #12967.. a type w/ numeric keys (ie HashMap etc) - // is going to have a string here, as per JSON spec. 
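`from_reader` accepts any `io::Read`, and a `Decoder` then drives an ordinary `Decodable` impl over the resulting `Json`. A sketch using a byte slice as the reader (and assuming the `Decodable` re-export at the crate root):

```rust
#![feature(rustc_private)]
extern crate serialize;

use serialize::Decodable;
use serialize::json::{self, Decoder};

fn main() {
    // A &[u8] implements io::Read, which is enough for from_reader.
    let mut input: &[u8] = br#"["a", "b", "c"]"#;
    let parsed = json::from_reader(&mut input).unwrap();

    // The Decoder walks the parsed Json according to the target type.
    let mut decoder = Decoder::new(parsed);
    let strings: Vec<String> = Decodable::decode(&mut decoder).unwrap();
    assert_eq!(strings,
               vec!["a".to_string(), "b".to_string(), "c".to_string()]);
}
```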
- match s.parse().ok() { - Some(f) => Ok(f), - None => Err(ExpectedError("Number".to_string(), s)), - } - }, - Json::Null => Ok(f64::NAN), - value => Err(ExpectedError("Number".to_string(), format!("{}", value))) - } - } - - fn read_bool(&mut self) -> DecodeResult { - expect!(self.pop(), Boolean) - } - - fn read_char(&mut self) -> DecodeResult { - let s = try!(self.read_str()); - { - let mut it = s.chars(); - match (it.next(), it.next()) { - // exactly one character - (Some(c), None) => return Ok(c), - _ => () - } - } - Err(ExpectedError("single character string".to_string(), format!("{}", s))) - } - - fn read_str(&mut self) -> DecodeResult { - expect!(self.pop(), String) - } - - fn read_enum(&mut self, _name: &str, f: F) -> DecodeResult where - F: FnOnce(&mut Decoder) -> DecodeResult, - { - f(self) - } - - fn read_enum_variant(&mut self, names: &[&str], - mut f: F) -> DecodeResult - where F: FnMut(&mut Decoder, usize) -> DecodeResult, - { - let name = match self.pop() { - Json::String(s) => s, - Json::Object(mut o) => { - let n = match o.remove(&"variant".to_string()) { - Some(Json::String(s)) => s, - Some(val) => { - return Err(ExpectedError("String".to_string(), format!("{}", val))) - } - None => { - return Err(MissingFieldError("variant".to_string())) - } - }; - match o.remove(&"fields".to_string()) { - Some(Json::Array(l)) => { - for field in l.into_iter().rev() { - self.stack.push(field); - } - }, - Some(val) => { - return Err(ExpectedError("Array".to_string(), format!("{}", val))) - } - None => { - return Err(MissingFieldError("fields".to_string())) - } - } - n - } - json => { - return Err(ExpectedError("String or Object".to_string(), format!("{}", json))) - } - }; - let idx = match names.iter().position(|n| *n == &name[..]) { - Some(idx) => idx, - None => return Err(UnknownVariantError(name)) - }; - f(self, idx) - } - - fn read_enum_variant_arg(&mut self, _idx: usize, f: F) -> DecodeResult where - F: FnOnce(&mut Decoder) -> DecodeResult, - { - f(self) - } - - fn read_enum_struct_variant(&mut self, names: &[&str], f: F) -> DecodeResult where - F: FnMut(&mut Decoder, usize) -> DecodeResult, - { - self.read_enum_variant(names, f) - } - - - fn read_enum_struct_variant_field(&mut self, - _name: &str, - idx: usize, - f: F) - -> DecodeResult where - F: FnOnce(&mut Decoder) -> DecodeResult, - { - self.read_enum_variant_arg(idx, f) - } - - fn read_struct(&mut self, _name: &str, _len: usize, f: F) -> DecodeResult where - F: FnOnce(&mut Decoder) -> DecodeResult, - { - let value = try!(f(self)); - self.pop(); - Ok(value) - } - - fn read_struct_field(&mut self, - name: &str, - _idx: usize, - f: F) - -> DecodeResult where - F: FnOnce(&mut Decoder) -> DecodeResult, - { - let mut obj = try!(expect!(self.pop(), Object)); - - let value = match obj.remove(&name.to_string()) { - None => { - // Add a Null and try to parse it as an Option<_> - // to get None as a default value. 
- self.stack.push(Json::Null); - match f(self) { - Ok(x) => x, - Err(_) => return Err(MissingFieldError(name.to_string())), - } - }, - Some(json) => { - self.stack.push(json); - try!(f(self)) - } - }; - self.stack.push(Json::Object(obj)); - Ok(value) - } - - fn read_tuple(&mut self, tuple_len: usize, f: F) -> DecodeResult where - F: FnOnce(&mut Decoder) -> DecodeResult, - { - self.read_seq(move |d, len| { - if len == tuple_len { - f(d) - } else { - Err(ExpectedError(format!("Tuple{}", tuple_len), format!("Tuple{}", len))) - } - }) - } - - fn read_tuple_arg(&mut self, idx: usize, f: F) -> DecodeResult where - F: FnOnce(&mut Decoder) -> DecodeResult, - { - self.read_seq_elt(idx, f) - } - - fn read_tuple_struct(&mut self, - _name: &str, - len: usize, - f: F) - -> DecodeResult where - F: FnOnce(&mut Decoder) -> DecodeResult, - { - self.read_tuple(len, f) - } - - fn read_tuple_struct_arg(&mut self, - idx: usize, - f: F) - -> DecodeResult where - F: FnOnce(&mut Decoder) -> DecodeResult, - { - self.read_tuple_arg(idx, f) - } - - fn read_option(&mut self, mut f: F) -> DecodeResult where - F: FnMut(&mut Decoder, bool) -> DecodeResult, - { - match self.pop() { - Json::Null => f(self, false), - value => { self.stack.push(value); f(self, true) } - } - } - - fn read_seq(&mut self, f: F) -> DecodeResult where - F: FnOnce(&mut Decoder, usize) -> DecodeResult, - { - let array = try!(expect!(self.pop(), Array)); - let len = array.len(); - for v in array.into_iter().rev() { - self.stack.push(v); - } - f(self, len) - } - - fn read_seq_elt(&mut self, _idx: usize, f: F) -> DecodeResult where - F: FnOnce(&mut Decoder) -> DecodeResult, - { - f(self) - } - - fn read_map(&mut self, f: F) -> DecodeResult where - F: FnOnce(&mut Decoder, usize) -> DecodeResult, - { - let obj = try!(expect!(self.pop(), Object)); - let len = obj.len(); - for (key, value) in obj { - self.stack.push(value); - self.stack.push(Json::String(key)); - } - f(self, len) - } - - fn read_map_elt_key(&mut self, _idx: usize, f: F) -> DecodeResult where - F: FnOnce(&mut Decoder) -> DecodeResult, - { - f(self) - } - - fn read_map_elt_val(&mut self, _idx: usize, f: F) -> DecodeResult where - F: FnOnce(&mut Decoder) -> DecodeResult, - { - f(self) - } - - fn error(&mut self, err: &str) -> DecoderError { - ApplicationError(err.to_string()) - } -} - -/// A trait for converting values to JSON -pub trait ToJson { - /// Converts the value of `self` to an instance of JSON - fn to_json(&self) -> Json; -} - -macro_rules! to_json_impl_i64 { - ($($t:ty), +) => ( - $(impl ToJson for $t { - fn to_json(&self) -> Json { - Json::I64(*self as i64) - } - })+ - ) -} - -to_json_impl_i64! { isize, i8, i16, i32, i64 } - -macro_rules! to_json_impl_u64 { - ($($t:ty), +) => ( - $(impl ToJson for $t { - fn to_json(&self) -> Json { - Json::U64(*self as u64) - } - })+ - ) -} - -to_json_impl_u64! 
{ usize, u8, u16, u32, u64 } - -impl ToJson for Json { - fn to_json(&self) -> Json { self.clone() } -} - -impl ToJson for f32 { - fn to_json(&self) -> Json { (*self as f64).to_json() } -} - -impl ToJson for f64 { - fn to_json(&self) -> Json { - match self.classify() { - Fp::Nan | Fp::Infinite => Json::Null, - _ => Json::F64(*self) - } - } -} - -impl ToJson for () { - fn to_json(&self) -> Json { Json::Null } -} - -impl ToJson for bool { - fn to_json(&self) -> Json { Json::Boolean(*self) } -} - -impl ToJson for str { - fn to_json(&self) -> Json { Json::String(self.to_string()) } -} - -impl ToJson for string::String { - fn to_json(&self) -> Json { Json::String((*self).clone()) } -} - -macro_rules! tuple_impl { - // use variables to indicate the arity of the tuple - ($($tyvar:ident),* ) => { - // the trailing commas are for the 1 tuple - impl< - $( $tyvar : ToJson ),* - > ToJson for ( $( $tyvar ),* , ) { - - #[inline] - #[allow(non_snake_case)] - fn to_json(&self) -> Json { - match *self { - ($(ref $tyvar),*,) => Json::Array(vec![$($tyvar.to_json()),*]) - } - } - } - } -} - -tuple_impl!{A} -tuple_impl!{A, B} -tuple_impl!{A, B, C} -tuple_impl!{A, B, C, D} -tuple_impl!{A, B, C, D, E} -tuple_impl!{A, B, C, D, E, F} -tuple_impl!{A, B, C, D, E, F, G} -tuple_impl!{A, B, C, D, E, F, G, H} -tuple_impl!{A, B, C, D, E, F, G, H, I} -tuple_impl!{A, B, C, D, E, F, G, H, I, J} -tuple_impl!{A, B, C, D, E, F, G, H, I, J, K} -tuple_impl!{A, B, C, D, E, F, G, H, I, J, K, L} - -impl ToJson for [A] { - fn to_json(&self) -> Json { Json::Array(self.iter().map(|elt| elt.to_json()).collect()) } -} - -impl ToJson for Vec { - fn to_json(&self) -> Json { Json::Array(self.iter().map(|elt| elt.to_json()).collect()) } -} - -impl ToJson for BTreeMap { - fn to_json(&self) -> Json { - let mut d = BTreeMap::new(); - for (key, value) in self { - d.insert((*key).clone(), value.to_json()); - } - Json::Object(d) - } -} - -impl ToJson for HashMap { - fn to_json(&self) -> Json { - let mut d = BTreeMap::new(); - for (key, value) in self { - d.insert((*key).clone(), value.to_json()); - } - Json::Object(d) - } -} - -impl ToJson for Option { - fn to_json(&self) -> Json { - match *self { - None => Json::Null, - Some(ref value) => value.to_json() - } - } -} - -struct FormatShim<'a, 'b: 'a> { - inner: &'a mut fmt::Formatter<'b>, -} - -impl<'a, 'b> fmt::Write for FormatShim<'a, 'b> { - fn write_str(&mut self, s: &str) -> fmt::Result { - match self.inner.write_str(s) { - Ok(_) => Ok(()), - Err(_) => Err(fmt::Error) - } - } -} - -impl fmt::Display for Json { - /// Encodes a json value into a string - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let mut shim = FormatShim { inner: f }; - let mut encoder = Encoder::new(&mut shim); - match self.encode(&mut encoder) { - Ok(_) => Ok(()), - Err(_) => Err(fmt::Error) - } - } -} - -impl<'a> fmt::Display for PrettyJson<'a> { - /// Encodes a json value into a string - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let mut shim = FormatShim { inner: f }; - let mut encoder = PrettyEncoder::new(&mut shim); - match self.inner.encode(&mut encoder) { - Ok(_) => Ok(()), - Err(_) => Err(fmt::Error) - } - } -} - -impl<'a, T: Encodable> fmt::Display for AsJson<'a, T> { - /// Encodes a json value into a string - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let mut shim = FormatShim { inner: f }; - let mut encoder = Encoder::new(&mut shim); - match self.inner.encode(&mut encoder) { - Ok(_) => Ok(()), - Err(_) => Err(fmt::Error) - } - } -} - -impl<'a, T> AsPrettyJson<'a, T> { - /// 
Set the indentation level for the emitted JSON - pub fn indent(mut self, indent: usize) -> AsPrettyJson<'a, T> { - self.indent = Some(indent); - self - } -} - -impl<'a, T: Encodable> fmt::Display for AsPrettyJson<'a, T> { - /// Encodes a json value into a string - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let mut shim = FormatShim { inner: f }; - let mut encoder = PrettyEncoder::new(&mut shim); - match self.indent { - Some(n) => encoder.set_indent(n), - None => {} - } - match self.inner.encode(&mut encoder) { - Ok(_) => Ok(()), - Err(_) => Err(fmt::Error) - } - } -} - -impl FromStr for Json { - type Err = BuilderError; - fn from_str(s: &str) -> Result { - from_str(s) - } -} - -#[cfg(test)] -mod tests { - extern crate test; - use self::Animal::*; - use self::DecodeEnum::*; - use self::test::Bencher; - use {Encodable, Decodable}; - use super::Json::*; - use super::ErrorCode::*; - use super::ParserError::*; - use super::DecoderError::*; - use super::JsonEvent::*; - use super::{Json, from_str, DecodeResult, DecoderError, JsonEvent, Parser, - StackElement, Stack, Decoder, Encoder, EncoderError}; - use std::{i64, u64, f32, f64}; - use std::io::prelude::*; - use std::collections::BTreeMap; - use std::string; - - #[derive(RustcDecodable, Eq, PartialEq, Debug)] - struct OptionData { - opt: Option, - } - - #[test] - fn test_decode_option_none() { - let s ="{}"; - let obj: OptionData = super::decode(s).unwrap(); - assert_eq!(obj, OptionData { opt: None }); - } - - #[test] - fn test_decode_option_some() { - let s = "{ \"opt\": 10 }"; - let obj: OptionData = super::decode(s).unwrap(); - assert_eq!(obj, OptionData { opt: Some(10) }); - } - - #[test] - fn test_decode_option_malformed() { - check_err::("{ \"opt\": [] }", - ExpectedError("Number".to_string(), "[]".to_string())); - check_err::("{ \"opt\": false }", - ExpectedError("Number".to_string(), "false".to_string())); - } - - #[derive(PartialEq, RustcEncodable, RustcDecodable, Debug)] - enum Animal { - Dog, - Frog(string::String, isize) - } - - #[derive(PartialEq, RustcEncodable, RustcDecodable, Debug)] - struct Inner { - a: (), - b: usize, - c: Vec, - } - - #[derive(PartialEq, RustcEncodable, RustcDecodable, Debug)] - struct Outer { - inner: Vec, - } - - fn mk_object(items: &[(string::String, Json)]) -> Json { - let mut d = BTreeMap::new(); - - for item in items { - match *item { - (ref key, ref value) => { d.insert((*key).clone(), (*value).clone()); }, - } - }; - - Object(d) - } - - #[test] - fn test_from_str_trait() { - let s = "null"; - assert!(s.parse::().unwrap() == s.parse().unwrap()); - } - - #[test] - fn test_write_null() { - assert_eq!(Null.to_string(), "null"); - assert_eq!(Null.pretty().to_string(), "null"); - } - - #[test] - fn test_write_i64() { - assert_eq!(U64(0).to_string(), "0"); - assert_eq!(U64(0).pretty().to_string(), "0"); - - assert_eq!(U64(1234).to_string(), "1234"); - assert_eq!(U64(1234).pretty().to_string(), "1234"); - - assert_eq!(I64(-5678).to_string(), "-5678"); - assert_eq!(I64(-5678).pretty().to_string(), "-5678"); - - assert_eq!(U64(7650007200025252000).to_string(), "7650007200025252000"); - assert_eq!(U64(7650007200025252000).pretty().to_string(), "7650007200025252000"); - } - - #[test] - fn test_write_f64() { - assert_eq!(F64(3.0).to_string(), "3.0"); - assert_eq!(F64(3.0).pretty().to_string(), "3.0"); - - assert_eq!(F64(3.1).to_string(), "3.1"); - assert_eq!(F64(3.1).pretty().to_string(), "3.1"); - - assert_eq!(F64(-1.5).to_string(), "-1.5"); - assert_eq!(F64(-1.5).pretty().to_string(), "-1.5"); - - 
assert_eq!(F64(0.5).to_string(), "0.5"); - assert_eq!(F64(0.5).pretty().to_string(), "0.5"); - - assert_eq!(F64(f64::NAN).to_string(), "null"); - assert_eq!(F64(f64::NAN).pretty().to_string(), "null"); - - assert_eq!(F64(f64::INFINITY).to_string(), "null"); - assert_eq!(F64(f64::INFINITY).pretty().to_string(), "null"); - - assert_eq!(F64(f64::NEG_INFINITY).to_string(), "null"); - assert_eq!(F64(f64::NEG_INFINITY).pretty().to_string(), "null"); - } - - #[test] - fn test_write_str() { - assert_eq!(String("".to_string()).to_string(), "\"\""); - assert_eq!(String("".to_string()).pretty().to_string(), "\"\""); - - assert_eq!(String("homura".to_string()).to_string(), "\"homura\""); - assert_eq!(String("madoka".to_string()).pretty().to_string(), "\"madoka\""); - } - - #[test] - fn test_write_bool() { - assert_eq!(Boolean(true).to_string(), "true"); - assert_eq!(Boolean(true).pretty().to_string(), "true"); - - assert_eq!(Boolean(false).to_string(), "false"); - assert_eq!(Boolean(false).pretty().to_string(), "false"); - } - - #[test] - fn test_write_array() { - assert_eq!(Array(vec![]).to_string(), "[]"); - assert_eq!(Array(vec![]).pretty().to_string(), "[]"); - - assert_eq!(Array(vec![Boolean(true)]).to_string(), "[true]"); - assert_eq!( - Array(vec![Boolean(true)]).pretty().to_string(), - "\ - [\n \ - true\n\ - ]" - ); - - let long_test_array = Array(vec![ - Boolean(false), - Null, - Array(vec![String("foo\nbar".to_string()), F64(3.5)])]); - - assert_eq!(long_test_array.to_string(), - "[false,null,[\"foo\\nbar\",3.5]]"); - assert_eq!( - long_test_array.pretty().to_string(), - "\ - [\n \ - false,\n \ - null,\n \ - [\n \ - \"foo\\nbar\",\n \ - 3.5\n \ - ]\n\ - ]" - ); - } - - #[test] - fn test_write_object() { - assert_eq!(mk_object(&[]).to_string(), "{}"); - assert_eq!(mk_object(&[]).pretty().to_string(), "{}"); - - assert_eq!( - mk_object(&[ - ("a".to_string(), Boolean(true)) - ]).to_string(), - "{\"a\":true}" - ); - assert_eq!( - mk_object(&[("a".to_string(), Boolean(true))]).pretty().to_string(), - "\ - {\n \ - \"a\": true\n\ - }" - ); - - let complex_obj = mk_object(&[ - ("b".to_string(), Array(vec![ - mk_object(&[("c".to_string(), String("\x0c\r".to_string()))]), - mk_object(&[("d".to_string(), String("".to_string()))]) - ])) - ]); - - assert_eq!( - complex_obj.to_string(), - "{\ - \"b\":[\ - {\"c\":\"\\f\\r\"},\ - {\"d\":\"\"}\ - ]\ - }" - ); - assert_eq!( - complex_obj.pretty().to_string(), - "\ - {\n \ - \"b\": [\n \ - {\n \ - \"c\": \"\\f\\r\"\n \ - },\n \ - {\n \ - \"d\": \"\"\n \ - }\n \ - ]\n\ - }" - ); - - let a = mk_object(&[ - ("a".to_string(), Boolean(true)), - ("b".to_string(), Array(vec![ - mk_object(&[("c".to_string(), String("\x0c\r".to_string()))]), - mk_object(&[("d".to_string(), String("".to_string()))]) - ])) - ]); - - // We can't compare the strings directly because the object fields be - // printed in a different order. 
- assert_eq!(a.clone(), a.to_string().parse().unwrap()); - assert_eq!(a.clone(), a.pretty().to_string().parse().unwrap()); - } - - #[test] - fn test_write_enum() { - let animal = Dog; - assert_eq!( - format!("{}", super::as_json(&animal)), - "\"Dog\"" - ); - assert_eq!( - format!("{}", super::as_pretty_json(&animal)), - "\"Dog\"" - ); - - let animal = Frog("Henry".to_string(), 349); - assert_eq!( - format!("{}", super::as_json(&animal)), - "{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}" - ); - assert_eq!( - format!("{}", super::as_pretty_json(&animal)), - "{\n \ - \"variant\": \"Frog\",\n \ - \"fields\": [\n \ - \"Henry\",\n \ - 349\n \ - ]\n\ - }" - ); - } - - macro_rules! check_encoder_for_simple { - ($value:expr, $expected:expr) => ({ - let s = format!("{}", super::as_json(&$value)); - assert_eq!(s, $expected); - - let s = format!("{}", super::as_pretty_json(&$value)); - assert_eq!(s, $expected); - }) - } - - #[test] - fn test_write_some() { - check_encoder_for_simple!(Some("jodhpurs".to_string()), "\"jodhpurs\""); - } - - #[test] - fn test_write_none() { - check_encoder_for_simple!(None::, "null"); - } - - #[test] - fn test_write_char() { - check_encoder_for_simple!('a', "\"a\""); - check_encoder_for_simple!('\t', "\"\\t\""); - check_encoder_for_simple!('\u{0000}', "\"\\u0000\""); - check_encoder_for_simple!('\u{001b}', "\"\\u001b\""); - check_encoder_for_simple!('\u{007f}', "\"\\u007f\""); - check_encoder_for_simple!('\u{00a0}', "\"\u{00a0}\""); - check_encoder_for_simple!('\u{abcd}', "\"\u{abcd}\""); - check_encoder_for_simple!('\u{10ffff}', "\"\u{10ffff}\""); - } - - #[test] - fn test_trailing_characters() { - assert_eq!(from_str("nulla"), Err(SyntaxError(TrailingCharacters, 1, 5))); - assert_eq!(from_str("truea"), Err(SyntaxError(TrailingCharacters, 1, 5))); - assert_eq!(from_str("falsea"), Err(SyntaxError(TrailingCharacters, 1, 6))); - assert_eq!(from_str("1a"), Err(SyntaxError(TrailingCharacters, 1, 2))); - assert_eq!(from_str("[]a"), Err(SyntaxError(TrailingCharacters, 1, 3))); - assert_eq!(from_str("{}a"), Err(SyntaxError(TrailingCharacters, 1, 3))); - } - - #[test] - fn test_read_identifiers() { - assert_eq!(from_str("n"), Err(SyntaxError(InvalidSyntax, 1, 2))); - assert_eq!(from_str("nul"), Err(SyntaxError(InvalidSyntax, 1, 4))); - assert_eq!(from_str("t"), Err(SyntaxError(InvalidSyntax, 1, 2))); - assert_eq!(from_str("truz"), Err(SyntaxError(InvalidSyntax, 1, 4))); - assert_eq!(from_str("f"), Err(SyntaxError(InvalidSyntax, 1, 2))); - assert_eq!(from_str("faz"), Err(SyntaxError(InvalidSyntax, 1, 3))); - - assert_eq!(from_str("null"), Ok(Null)); - assert_eq!(from_str("true"), Ok(Boolean(true))); - assert_eq!(from_str("false"), Ok(Boolean(false))); - assert_eq!(from_str(" null "), Ok(Null)); - assert_eq!(from_str(" true "), Ok(Boolean(true))); - assert_eq!(from_str(" false "), Ok(Boolean(false))); - } - - #[test] - fn test_decode_identifiers() { - let v: () = super::decode("null").unwrap(); - assert_eq!(v, ()); - - let v: bool = super::decode("true").unwrap(); - assert_eq!(v, true); - - let v: bool = super::decode("false").unwrap(); - assert_eq!(v, false); - } - - #[test] - fn test_read_number() { - assert_eq!(from_str("+"), Err(SyntaxError(InvalidSyntax, 1, 1))); - assert_eq!(from_str("."), Err(SyntaxError(InvalidSyntax, 1, 1))); - assert_eq!(from_str("NaN"), Err(SyntaxError(InvalidSyntax, 1, 1))); - assert_eq!(from_str("-"), Err(SyntaxError(InvalidNumber, 1, 2))); - assert_eq!(from_str("00"), Err(SyntaxError(InvalidNumber, 1, 2))); - assert_eq!(from_str("1."), 
Err(SyntaxError(InvalidNumber, 1, 3))); - assert_eq!(from_str("1e"), Err(SyntaxError(InvalidNumber, 1, 3))); - assert_eq!(from_str("1e+"), Err(SyntaxError(InvalidNumber, 1, 4))); - - assert_eq!(from_str("18446744073709551616"), Err(SyntaxError(InvalidNumber, 1, 20))); - assert_eq!(from_str("-9223372036854775809"), Err(SyntaxError(InvalidNumber, 1, 21))); - - assert_eq!(from_str("3"), Ok(U64(3))); - assert_eq!(from_str("3.1"), Ok(F64(3.1))); - assert_eq!(from_str("-1.2"), Ok(F64(-1.2))); - assert_eq!(from_str("0.4"), Ok(F64(0.4))); - assert_eq!(from_str("0.4e5"), Ok(F64(0.4e5))); - assert_eq!(from_str("0.4e+15"), Ok(F64(0.4e15))); - assert_eq!(from_str("0.4e-01"), Ok(F64(0.4e-01))); - assert_eq!(from_str(" 3 "), Ok(U64(3))); - - assert_eq!(from_str("-9223372036854775808"), Ok(I64(i64::MIN))); - assert_eq!(from_str("9223372036854775807"), Ok(U64(i64::MAX as u64))); - assert_eq!(from_str("18446744073709551615"), Ok(U64(u64::MAX))); - } - - #[test] - fn test_decode_numbers() { - let v: f64 = super::decode("3").unwrap(); - assert_eq!(v, 3.0); - - let v: f64 = super::decode("3.1").unwrap(); - assert_eq!(v, 3.1); - - let v: f64 = super::decode("-1.2").unwrap(); - assert_eq!(v, -1.2); - - let v: f64 = super::decode("0.4").unwrap(); - assert_eq!(v, 0.4); - - let v: f64 = super::decode("0.4e5").unwrap(); - assert_eq!(v, 0.4e5); - - let v: f64 = super::decode("0.4e15").unwrap(); - assert_eq!(v, 0.4e15); - - let v: f64 = super::decode("0.4e-01").unwrap(); - assert_eq!(v, 0.4e-01); - - let v: u64 = super::decode("0").unwrap(); - assert_eq!(v, 0); - - let v: u64 = super::decode("18446744073709551615").unwrap(); - assert_eq!(v, u64::MAX); - - let v: i64 = super::decode("-9223372036854775808").unwrap(); - assert_eq!(v, i64::MIN); - - let v: i64 = super::decode("9223372036854775807").unwrap(); - assert_eq!(v, i64::MAX); - - let res: DecodeResult = super::decode("765.25"); - assert_eq!(res, Err(ExpectedError("Integer".to_string(), - "765.25".to_string()))); - } - - #[test] - fn test_read_str() { - assert_eq!(from_str("\""), Err(SyntaxError(EOFWhileParsingString, 1, 2))); - assert_eq!(from_str("\"lol"), Err(SyntaxError(EOFWhileParsingString, 1, 5))); - - assert_eq!(from_str("\"\""), Ok(String("".to_string()))); - assert_eq!(from_str("\"foo\""), Ok(String("foo".to_string()))); - assert_eq!(from_str("\"\\\"\""), Ok(String("\"".to_string()))); - assert_eq!(from_str("\"\\b\""), Ok(String("\x08".to_string()))); - assert_eq!(from_str("\"\\n\""), Ok(String("\n".to_string()))); - assert_eq!(from_str("\"\\r\""), Ok(String("\r".to_string()))); - assert_eq!(from_str("\"\\t\""), Ok(String("\t".to_string()))); - assert_eq!(from_str(" \"foo\" "), Ok(String("foo".to_string()))); - assert_eq!(from_str("\"\\u12ab\""), Ok(String("\u{12ab}".to_string()))); - assert_eq!(from_str("\"\\uAB12\""), Ok(String("\u{AB12}".to_string()))); - } - - #[test] - fn test_decode_str() { - let s = [("\"\"", ""), - ("\"foo\"", "foo"), - ("\"\\\"\"", "\""), - ("\"\\b\"", "\x08"), - ("\"\\n\"", "\n"), - ("\"\\r\"", "\r"), - ("\"\\t\"", "\t"), - ("\"\\u12ab\"", "\u{12ab}"), - ("\"\\uAB12\"", "\u{AB12}")]; - - for &(i, o) in &s { - let v: string::String = super::decode(i).unwrap(); - assert_eq!(v, o); - } - } - - #[test] - fn test_read_array() { - assert_eq!(from_str("["), Err(SyntaxError(EOFWhileParsingValue, 1, 2))); - assert_eq!(from_str("[1"), Err(SyntaxError(EOFWhileParsingArray, 1, 3))); - assert_eq!(from_str("[1,"), Err(SyntaxError(EOFWhileParsingValue, 1, 4))); - assert_eq!(from_str("[1,]"), Err(SyntaxError(InvalidSyntax, 1, 4))); - 
assert_eq!(from_str("[6 7]"), Err(SyntaxError(InvalidSyntax, 1, 4))); - - assert_eq!(from_str("[]"), Ok(Array(vec![]))); - assert_eq!(from_str("[ ]"), Ok(Array(vec![]))); - assert_eq!(from_str("[true]"), Ok(Array(vec![Boolean(true)]))); - assert_eq!(from_str("[ false ]"), Ok(Array(vec![Boolean(false)]))); - assert_eq!(from_str("[null]"), Ok(Array(vec![Null]))); - assert_eq!(from_str("[3, 1]"), - Ok(Array(vec![U64(3), U64(1)]))); - assert_eq!(from_str("\n[3, 2]\n"), - Ok(Array(vec![U64(3), U64(2)]))); - assert_eq!(from_str("[2, [4, 1]]"), - Ok(Array(vec![U64(2), Array(vec![U64(4), U64(1)])]))); - } - - #[test] - fn test_decode_array() { - let v: Vec<()> = super::decode("[]").unwrap(); - assert_eq!(v, []); - - let v: Vec<()> = super::decode("[null]").unwrap(); - assert_eq!(v, [()]); - - let v: Vec = super::decode("[true]").unwrap(); - assert_eq!(v, [true]); - - let v: Vec = super::decode("[3, 1]").unwrap(); - assert_eq!(v, [3, 1]); - - let v: Vec> = super::decode("[[3], [1, 2]]").unwrap(); - assert_eq!(v, [vec![3], vec![1, 2]]); - } - - #[test] - fn test_decode_tuple() { - let t: (usize, usize, usize) = super::decode("[1, 2, 3]").unwrap(); - assert_eq!(t, (1, 2, 3)); - - let t: (usize, string::String) = super::decode("[1, \"two\"]").unwrap(); - assert_eq!(t, (1, "two".to_string())); - } - - #[test] - fn test_decode_tuple_malformed_types() { - assert!(super::decode::<(usize, string::String)>("[1, 2]").is_err()); - } - - #[test] - fn test_decode_tuple_malformed_length() { - assert!(super::decode::<(usize, usize)>("[1, 2, 3]").is_err()); - } - - #[test] - fn test_read_object() { - assert_eq!(from_str("{"), Err(SyntaxError(EOFWhileParsingObject, 1, 2))); - assert_eq!(from_str("{ "), Err(SyntaxError(EOFWhileParsingObject, 1, 3))); - assert_eq!(from_str("{1"), Err(SyntaxError(KeyMustBeAString, 1, 2))); - assert_eq!(from_str("{ \"a\""), Err(SyntaxError(EOFWhileParsingObject, 1, 6))); - assert_eq!(from_str("{\"a\""), Err(SyntaxError(EOFWhileParsingObject, 1, 5))); - assert_eq!(from_str("{\"a\" "), Err(SyntaxError(EOFWhileParsingObject, 1, 6))); - - assert_eq!(from_str("{\"a\" 1"), Err(SyntaxError(ExpectedColon, 1, 6))); - assert_eq!(from_str("{\"a\":"), Err(SyntaxError(EOFWhileParsingValue, 1, 6))); - assert_eq!(from_str("{\"a\":1"), Err(SyntaxError(EOFWhileParsingObject, 1, 7))); - assert_eq!(from_str("{\"a\":1 1"), Err(SyntaxError(InvalidSyntax, 1, 8))); - assert_eq!(from_str("{\"a\":1,"), Err(SyntaxError(EOFWhileParsingObject, 1, 8))); - - assert_eq!(from_str("{}").unwrap(), mk_object(&[])); - assert_eq!(from_str("{\"a\": 3}").unwrap(), - mk_object(&[("a".to_string(), U64(3))])); - - assert_eq!(from_str( - "{ \"a\": null, \"b\" : true }").unwrap(), - mk_object(&[ - ("a".to_string(), Null), - ("b".to_string(), Boolean(true))])); - assert_eq!(from_str("\n{ \"a\": null, \"b\" : true }\n").unwrap(), - mk_object(&[ - ("a".to_string(), Null), - ("b".to_string(), Boolean(true))])); - assert_eq!(from_str( - "{\"a\" : 1.0 ,\"b\": [ true ]}").unwrap(), - mk_object(&[ - ("a".to_string(), F64(1.0)), - ("b".to_string(), Array(vec![Boolean(true)])) - ])); - assert_eq!(from_str( - "{\ - \"a\": 1.0, \ - \"b\": [\ - true,\ - \"foo\\nbar\", \ - { \"c\": {\"d\": null} } \ - ]\ - }").unwrap(), - mk_object(&[ - ("a".to_string(), F64(1.0)), - ("b".to_string(), Array(vec![ - Boolean(true), - String("foo\nbar".to_string()), - mk_object(&[ - ("c".to_string(), mk_object(&[("d".to_string(), Null)])) - ]) - ])) - ])); - } - - #[test] - fn test_decode_struct() { - let s = "{ - \"inner\": [ - { \"a\": null, \"b\": 2, \"c\": 
[\"abc\", \"xyz\"] } - ] - }"; - - let v: Outer = super::decode(s).unwrap(); - assert_eq!( - v, - Outer { - inner: vec![ - Inner { a: (), b: 2, c: vec!["abc".to_string(), "xyz".to_string()] } - ] - } - ); - } - - #[derive(RustcDecodable)] - struct FloatStruct { - f: f64, - a: Vec - } - #[test] - fn test_decode_struct_with_nan() { - let s = "{\"f\":null,\"a\":[null,123]}"; - let obj: FloatStruct = super::decode(s).unwrap(); - assert!(obj.f.is_nan()); - assert!(obj.a[0].is_nan()); - assert_eq!(obj.a[1], 123f64); - } - - #[test] - fn test_decode_option() { - let value: Option = super::decode("null").unwrap(); - assert_eq!(value, None); - - let value: Option = super::decode("\"jodhpurs\"").unwrap(); - assert_eq!(value, Some("jodhpurs".to_string())); - } - - #[test] - fn test_decode_enum() { - let value: Animal = super::decode("\"Dog\"").unwrap(); - assert_eq!(value, Dog); - - let s = "{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}"; - let value: Animal = super::decode(s).unwrap(); - assert_eq!(value, Frog("Henry".to_string(), 349)); - } - - #[test] - fn test_decode_map() { - let s = "{\"a\": \"Dog\", \"b\": {\"variant\":\"Frog\",\ - \"fields\":[\"Henry\", 349]}}"; - let mut map: BTreeMap = super::decode(s).unwrap(); - - assert_eq!(map.remove(&"a".to_string()), Some(Dog)); - assert_eq!(map.remove(&"b".to_string()), Some(Frog("Henry".to_string(), 349))); - } - - #[test] - fn test_multiline_errors() { - assert_eq!(from_str("{\n \"foo\":\n \"bar\""), - Err(SyntaxError(EOFWhileParsingObject, 3, 8))); - } - - #[derive(RustcDecodable)] - #[allow(dead_code)] - struct DecodeStruct { - x: f64, - y: bool, - z: string::String, - w: Vec - } - #[derive(RustcDecodable)] - enum DecodeEnum { - A(f64), - B(string::String) - } - fn check_err(to_parse: &'static str, expected: DecoderError) { - let res: DecodeResult = match from_str(to_parse) { - Err(e) => Err(ParseError(e)), - Ok(json) => Decodable::decode(&mut Decoder::new(json)) - }; - match res { - Ok(_) => panic!("`{:?}` parsed & decoded ok, expecting error `{:?}`", - to_parse, expected), - Err(ParseError(e)) => panic!("`{:?}` is not valid json: {:?}", - to_parse, e), - Err(e) => { - assert_eq!(e, expected); - } - } - } - #[test] - fn test_decode_errors_struct() { - check_err::("[]", ExpectedError("Object".to_string(), "[]".to_string())); - check_err::("{\"x\": true, \"y\": true, \"z\": \"\", \"w\": []}", - ExpectedError("Number".to_string(), "true".to_string())); - check_err::("{\"x\": 1, \"y\": [], \"z\": \"\", \"w\": []}", - ExpectedError("Boolean".to_string(), "[]".to_string())); - check_err::("{\"x\": 1, \"y\": true, \"z\": {}, \"w\": []}", - ExpectedError("String".to_string(), "{}".to_string())); - check_err::("{\"x\": 1, \"y\": true, \"z\": \"\", \"w\": null}", - ExpectedError("Array".to_string(), "null".to_string())); - check_err::("{\"x\": 1, \"y\": true, \"z\": \"\"}", - MissingFieldError("w".to_string())); - } - #[test] - fn test_decode_errors_enum() { - check_err::("{}", - MissingFieldError("variant".to_string())); - check_err::("{\"variant\": 1}", - ExpectedError("String".to_string(), "1".to_string())); - check_err::("{\"variant\": \"A\"}", - MissingFieldError("fields".to_string())); - check_err::("{\"variant\": \"A\", \"fields\": null}", - ExpectedError("Array".to_string(), "null".to_string())); - check_err::("{\"variant\": \"C\", \"fields\": []}", - UnknownVariantError("C".to_string())); - } - - #[test] - fn test_find(){ - let json_value = from_str("{\"dog\" : \"cat\"}").unwrap(); - let found_str = json_value.find("dog"); - 
assert!(found_str.unwrap().as_string().unwrap() == "cat"); - } - - #[test] - fn test_find_path(){ - let json_value = from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap(); - let found_str = json_value.find_path(&["dog", "cat", "mouse"]); - assert!(found_str.unwrap().as_string().unwrap() == "cheese"); - } - - #[test] - fn test_search(){ - let json_value = from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap(); - let found_str = json_value.search("mouse").and_then(|j| j.as_string()); - assert!(found_str.unwrap() == "cheese"); - } - - #[test] - fn test_index(){ - let json_value = from_str("{\"animals\":[\"dog\",\"cat\",\"mouse\"]}").unwrap(); - let ref array = json_value["animals"]; - assert_eq!(array[0].as_string().unwrap(), "dog"); - assert_eq!(array[1].as_string().unwrap(), "cat"); - assert_eq!(array[2].as_string().unwrap(), "mouse"); - } - - #[test] - fn test_is_object(){ - let json_value = from_str("{}").unwrap(); - assert!(json_value.is_object()); - } - - #[test] - fn test_as_object(){ - let json_value = from_str("{}").unwrap(); - let json_object = json_value.as_object(); - assert!(json_object.is_some()); - } - - #[test] - fn test_is_array(){ - let json_value = from_str("[1, 2, 3]").unwrap(); - assert!(json_value.is_array()); - } - - #[test] - fn test_as_array(){ - let json_value = from_str("[1, 2, 3]").unwrap(); - let json_array = json_value.as_array(); - let expected_length = 3; - assert!(json_array.is_some() && json_array.unwrap().len() == expected_length); - } - - #[test] - fn test_is_string(){ - let json_value = from_str("\"dog\"").unwrap(); - assert!(json_value.is_string()); - } - - #[test] - fn test_as_string(){ - let json_value = from_str("\"dog\"").unwrap(); - let json_str = json_value.as_string(); - let expected_str = "dog"; - assert_eq!(json_str, Some(expected_str)); - } - - #[test] - fn test_is_number(){ - let json_value = from_str("12").unwrap(); - assert!(json_value.is_number()); - } - - #[test] - fn test_is_i64(){ - let json_value = from_str("-12").unwrap(); - assert!(json_value.is_i64()); - - let json_value = from_str("12").unwrap(); - assert!(!json_value.is_i64()); - - let json_value = from_str("12.0").unwrap(); - assert!(!json_value.is_i64()); - } - - #[test] - fn test_is_u64(){ - let json_value = from_str("12").unwrap(); - assert!(json_value.is_u64()); - - let json_value = from_str("-12").unwrap(); - assert!(!json_value.is_u64()); - - let json_value = from_str("12.0").unwrap(); - assert!(!json_value.is_u64()); - } - - #[test] - fn test_is_f64(){ - let json_value = from_str("12").unwrap(); - assert!(!json_value.is_f64()); - - let json_value = from_str("-12").unwrap(); - assert!(!json_value.is_f64()); - - let json_value = from_str("12.0").unwrap(); - assert!(json_value.is_f64()); - - let json_value = from_str("-12.0").unwrap(); - assert!(json_value.is_f64()); - } - - #[test] - fn test_as_i64(){ - let json_value = from_str("-12").unwrap(); - let json_num = json_value.as_i64(); - assert_eq!(json_num, Some(-12)); - } - - #[test] - fn test_as_u64(){ - let json_value = from_str("12").unwrap(); - let json_num = json_value.as_u64(); - assert_eq!(json_num, Some(12)); - } - - #[test] - fn test_as_f64(){ - let json_value = from_str("12.0").unwrap(); - let json_num = json_value.as_f64(); - assert_eq!(json_num, Some(12f64)); - } - - #[test] - fn test_is_boolean(){ - let json_value = from_str("false").unwrap(); - assert!(json_value.is_boolean()); - } - - #[test] - fn test_as_boolean(){ - let json_value = from_str("false").unwrap(); - let json_bool = 
json_value.as_boolean(); - let expected_bool = false; - assert!(json_bool.is_some() && json_bool.unwrap() == expected_bool); - } - - #[test] - fn test_is_null(){ - let json_value = from_str("null").unwrap(); - assert!(json_value.is_null()); - } - - #[test] - fn test_as_null(){ - let json_value = from_str("null").unwrap(); - let json_null = json_value.as_null(); - let expected_null = (); - assert!(json_null.is_some() && json_null.unwrap() == expected_null); - } - - #[test] - fn test_encode_hashmap_with_numeric_key() { - use std::str::from_utf8; - use std::collections::HashMap; - let mut hm: HashMap = HashMap::new(); - hm.insert(1, true); - let mut mem_buf = Vec::new(); - write!(&mut mem_buf, "{}", super::as_pretty_json(&hm)).unwrap(); - let json_str = from_utf8(&mem_buf[..]).unwrap(); - match from_str(json_str) { - Err(_) => panic!("Unable to parse json_str: {:?}", json_str), - _ => {} // it parsed and we are good to go - } - } - - #[test] - fn test_prettyencode_hashmap_with_numeric_key() { - use std::str::from_utf8; - use std::collections::HashMap; - let mut hm: HashMap = HashMap::new(); - hm.insert(1, true); - let mut mem_buf = Vec::new(); - write!(&mut mem_buf, "{}", super::as_pretty_json(&hm)).unwrap(); - let json_str = from_utf8(&mem_buf[..]).unwrap(); - match from_str(json_str) { - Err(_) => panic!("Unable to parse json_str: {:?}", json_str), - _ => {} // it parsed and we are good to go - } - } - - #[test] - fn test_prettyencoder_indent_level_param() { - use std::str::from_utf8; - use std::collections::BTreeMap; - - let mut tree = BTreeMap::new(); - - tree.insert("hello".to_string(), String("guten tag".to_string())); - tree.insert("goodbye".to_string(), String("sayonara".to_string())); - - let json = Array( - // The following layout below should look a lot like - // the pretty-printed JSON (indent * x) - vec! - ( // 0x - String("greetings".to_string()), // 1x - Object(tree), // 1x + 2x + 2x + 1x - ) // 0x - // End JSON array (7 lines) - ); - - // Helper function for counting indents - fn indents(source: &str) -> usize { - let trimmed = source.trim_left_matches(' '); - source.len() - trimmed.len() - } - - // Test up to 4 spaces of indents (more?) 
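The `for` loop that follows exercises widths 0 through 3 via the test harness; outside the tests, the same knob is just `as_pretty_json(..).indent(n)`. A standalone sketch of that public entry point (the sample value is hypothetical; any `Encodable` type works):

```{.rust}
extern crate serialize;

use serialize::json;

fn main() {
    let greetings = vec!["hello".to_string(), "goodbye".to_string()];
    // Each nesting level is indented by the width passed to indent().
    for width in 0..4 {
        println!("{}", json::as_pretty_json(&greetings).indent(width));
    }
}
```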
- for i in 0..4 { - let mut writer = Vec::new(); - write!(&mut writer, "{}", - super::as_pretty_json(&json).indent(i)).unwrap(); - - let printed = from_utf8(&writer[..]).unwrap(); - - // Check for indents at each line - let lines: Vec<&str> = printed.lines().collect(); - assert_eq!(lines.len(), 7); // JSON should be 7 lines - - assert_eq!(indents(lines[0]), 0 * i); // [ - assert_eq!(indents(lines[1]), 1 * i); // "greetings", - assert_eq!(indents(lines[2]), 1 * i); // { - assert_eq!(indents(lines[3]), 2 * i); // "hello": "guten tag", - assert_eq!(indents(lines[4]), 2 * i); // "goodbye": "sayonara" - assert_eq!(indents(lines[5]), 1 * i); // }, - assert_eq!(indents(lines[6]), 0 * i); // ] - - // Finally, test that the pretty-printed JSON is valid - from_str(printed).ok().expect("Pretty-printed JSON is invalid!"); - } - } - - #[test] - fn test_hashmap_with_enum_key() { - use std::collections::HashMap; - use json; - #[derive(RustcEncodable, Eq, Hash, PartialEq, RustcDecodable, Debug)] - enum Enum { - Foo, - #[allow(dead_code)] - Bar, - } - let mut map = HashMap::new(); - map.insert(Enum::Foo, 0); - let result = json::encode(&map).unwrap(); - assert_eq!(&result[..], r#"{"Foo":0}"#); - let decoded: HashMap = json::decode(&result).unwrap(); - assert_eq!(map, decoded); - } - - #[test] - fn test_hashmap_with_numeric_key_can_handle_double_quote_delimited_key() { - use std::collections::HashMap; - use Decodable; - let json_str = "{\"1\":true}"; - let json_obj = match from_str(json_str) { - Err(_) => panic!("Unable to parse json_str: {:?}", json_str), - Ok(o) => o - }; - let mut decoder = Decoder::new(json_obj); - let _hm: HashMap = Decodable::decode(&mut decoder).unwrap(); - } - - #[test] - fn test_hashmap_with_numeric_key_will_error_with_string_keys() { - use std::collections::HashMap; - use Decodable; - let json_str = "{\"a\":true}"; - let json_obj = match from_str(json_str) { - Err(_) => panic!("Unable to parse json_str: {:?}", json_str), - Ok(o) => o - }; - let mut decoder = Decoder::new(json_obj); - let result: Result, DecoderError> = Decodable::decode(&mut decoder); - assert_eq!(result, Err(ExpectedError("Number".to_string(), "a".to_string()))); - } - - fn assert_stream_equal(src: &str, - expected: Vec<(JsonEvent, Vec)>) { - let mut parser = Parser::new(src.chars()); - let mut i = 0; - loop { - let evt = match parser.next() { - Some(e) => e, - None => { break; } - }; - let (ref expected_evt, ref expected_stack) = expected[i]; - if !parser.stack().is_equal_to(expected_stack) { - panic!("Parser stack is not equal to {:?}", expected_stack); - } - assert_eq!(&evt, expected_evt); - i+=1; - } - } - #[test] - #[cfg_attr(target_pointer_width = "32", ignore)] // FIXME(#14064) - fn test_streaming_parser() { - assert_stream_equal( - r#"{ "foo":"bar", "array" : [0, 1, 2, 3, 4, 5], "idents":[null,true,false]}"#, - vec![ - (ObjectStart, vec![]), - (StringValue("bar".to_string()), vec![StackElement::Key("foo")]), - (ArrayStart, vec![StackElement::Key("array")]), - (U64Value(0), vec![StackElement::Key("array"), StackElement::Index(0)]), - (U64Value(1), vec![StackElement::Key("array"), StackElement::Index(1)]), - (U64Value(2), vec![StackElement::Key("array"), StackElement::Index(2)]), - (U64Value(3), vec![StackElement::Key("array"), StackElement::Index(3)]), - (U64Value(4), vec![StackElement::Key("array"), StackElement::Index(4)]), - (U64Value(5), vec![StackElement::Key("array"), StackElement::Index(5)]), - (ArrayEnd, vec![StackElement::Key("array")]), - (ArrayStart, vec![StackElement::Key("idents")]), - 
(NullValue, vec![StackElement::Key("idents"), - StackElement::Index(0)]), - (BooleanValue(true), vec![StackElement::Key("idents"), - StackElement::Index(1)]), - (BooleanValue(false), vec![StackElement::Key("idents"), - StackElement::Index(2)]), - (ArrayEnd, vec![StackElement::Key("idents")]), - (ObjectEnd, vec![]), - ] - ); - } - fn last_event(src: &str) -> JsonEvent { - let mut parser = Parser::new(src.chars()); - let mut evt = NullValue; - loop { - evt = match parser.next() { - Some(e) => e, - None => return evt, - } - } - } - - #[test] - #[cfg_attr(target_pointer_width = "32", ignore)] // FIXME(#14064) - fn test_read_object_streaming() { - assert_eq!(last_event("{ "), Error(SyntaxError(EOFWhileParsingObject, 1, 3))); - assert_eq!(last_event("{1"), Error(SyntaxError(KeyMustBeAString, 1, 2))); - assert_eq!(last_event("{ \"a\""), Error(SyntaxError(EOFWhileParsingObject, 1, 6))); - assert_eq!(last_event("{\"a\""), Error(SyntaxError(EOFWhileParsingObject, 1, 5))); - assert_eq!(last_event("{\"a\" "), Error(SyntaxError(EOFWhileParsingObject, 1, 6))); - - assert_eq!(last_event("{\"a\" 1"), Error(SyntaxError(ExpectedColon, 1, 6))); - assert_eq!(last_event("{\"a\":"), Error(SyntaxError(EOFWhileParsingValue, 1, 6))); - assert_eq!(last_event("{\"a\":1"), Error(SyntaxError(EOFWhileParsingObject, 1, 7))); - assert_eq!(last_event("{\"a\":1 1"), Error(SyntaxError(InvalidSyntax, 1, 8))); - assert_eq!(last_event("{\"a\":1,"), Error(SyntaxError(EOFWhileParsingObject, 1, 8))); - assert_eq!(last_event("{\"a\":1,}"), Error(SyntaxError(TrailingComma, 1, 8))); - - assert_stream_equal( - "{}", - vec![(ObjectStart, vec![]), (ObjectEnd, vec![])] - ); - assert_stream_equal( - "{\"a\": 3}", - vec![ - (ObjectStart, vec![]), - (U64Value(3), vec![StackElement::Key("a")]), - (ObjectEnd, vec![]), - ] - ); - assert_stream_equal( - "{ \"a\": null, \"b\" : true }", - vec![ - (ObjectStart, vec![]), - (NullValue, vec![StackElement::Key("a")]), - (BooleanValue(true), vec![StackElement::Key("b")]), - (ObjectEnd, vec![]), - ] - ); - assert_stream_equal( - "{\"a\" : 1.0 ,\"b\": [ true ]}", - vec![ - (ObjectStart, vec![]), - (F64Value(1.0), vec![StackElement::Key("a")]), - (ArrayStart, vec![StackElement::Key("b")]), - (BooleanValue(true),vec![StackElement::Key("b"), StackElement::Index(0)]), - (ArrayEnd, vec![StackElement::Key("b")]), - (ObjectEnd, vec![]), - ] - ); - assert_stream_equal( - r#"{ - "a": 1.0, - "b": [ - true, - "foo\nbar", - { "c": {"d": null} } - ] - }"#, - vec![ - (ObjectStart, vec![]), - (F64Value(1.0), vec![StackElement::Key("a")]), - (ArrayStart, vec![StackElement::Key("b")]), - (BooleanValue(true), vec![StackElement::Key("b"), - StackElement::Index(0)]), - (StringValue("foo\nbar".to_string()), vec![StackElement::Key("b"), - StackElement::Index(1)]), - (ObjectStart, vec![StackElement::Key("b"), - StackElement::Index(2)]), - (ObjectStart, vec![StackElement::Key("b"), - StackElement::Index(2), - StackElement::Key("c")]), - (NullValue, vec![StackElement::Key("b"), - StackElement::Index(2), - StackElement::Key("c"), - StackElement::Key("d")]), - (ObjectEnd, vec![StackElement::Key("b"), - StackElement::Index(2), - StackElement::Key("c")]), - (ObjectEnd, vec![StackElement::Key("b"), - StackElement::Index(2)]), - (ArrayEnd, vec![StackElement::Key("b")]), - (ObjectEnd, vec![]), - ] - ); - } - #[test] - #[cfg_attr(target_pointer_width = "32", ignore)] // FIXME(#14064) - fn test_read_array_streaming() { - assert_stream_equal( - "[]", - vec![ - (ArrayStart, vec![]), - (ArrayEnd, vec![]), - ] - ); - assert_stream_equal( - 
"[ ]", - vec![ - (ArrayStart, vec![]), - (ArrayEnd, vec![]), - ] - ); - assert_stream_equal( - "[true]", - vec![ - (ArrayStart, vec![]), - (BooleanValue(true), vec![StackElement::Index(0)]), - (ArrayEnd, vec![]), - ] - ); - assert_stream_equal( - "[ false ]", - vec![ - (ArrayStart, vec![]), - (BooleanValue(false), vec![StackElement::Index(0)]), - (ArrayEnd, vec![]), - ] - ); - assert_stream_equal( - "[null]", - vec![ - (ArrayStart, vec![]), - (NullValue, vec![StackElement::Index(0)]), - (ArrayEnd, vec![]), - ] - ); - assert_stream_equal( - "[3, 1]", - vec![ - (ArrayStart, vec![]), - (U64Value(3), vec![StackElement::Index(0)]), - (U64Value(1), vec![StackElement::Index(1)]), - (ArrayEnd, vec![]), - ] - ); - assert_stream_equal( - "\n[3, 2]\n", - vec![ - (ArrayStart, vec![]), - (U64Value(3), vec![StackElement::Index(0)]), - (U64Value(2), vec![StackElement::Index(1)]), - (ArrayEnd, vec![]), - ] - ); - assert_stream_equal( - "[2, [4, 1]]", - vec![ - (ArrayStart, vec![]), - (U64Value(2), vec![StackElement::Index(0)]), - (ArrayStart, vec![StackElement::Index(1)]), - (U64Value(4), vec![StackElement::Index(1), StackElement::Index(0)]), - (U64Value(1), vec![StackElement::Index(1), StackElement::Index(1)]), - (ArrayEnd, vec![StackElement::Index(1)]), - (ArrayEnd, vec![]), - ] - ); - - assert_eq!(last_event("["), Error(SyntaxError(EOFWhileParsingValue, 1, 2))); - - assert_eq!(from_str("["), Err(SyntaxError(EOFWhileParsingValue, 1, 2))); - assert_eq!(from_str("[1"), Err(SyntaxError(EOFWhileParsingArray, 1, 3))); - assert_eq!(from_str("[1,"), Err(SyntaxError(EOFWhileParsingValue, 1, 4))); - assert_eq!(from_str("[1,]"), Err(SyntaxError(InvalidSyntax, 1, 4))); - assert_eq!(from_str("[6 7]"), Err(SyntaxError(InvalidSyntax, 1, 4))); - - } - #[test] - fn test_trailing_characters_streaming() { - assert_eq!(last_event("nulla"), Error(SyntaxError(TrailingCharacters, 1, 5))); - assert_eq!(last_event("truea"), Error(SyntaxError(TrailingCharacters, 1, 5))); - assert_eq!(last_event("falsea"), Error(SyntaxError(TrailingCharacters, 1, 6))); - assert_eq!(last_event("1a"), Error(SyntaxError(TrailingCharacters, 1, 2))); - assert_eq!(last_event("[]a"), Error(SyntaxError(TrailingCharacters, 1, 3))); - assert_eq!(last_event("{}a"), Error(SyntaxError(TrailingCharacters, 1, 3))); - } - #[test] - fn test_read_identifiers_streaming() { - assert_eq!(Parser::new("null".chars()).next(), Some(NullValue)); - assert_eq!(Parser::new("true".chars()).next(), Some(BooleanValue(true))); - assert_eq!(Parser::new("false".chars()).next(), Some(BooleanValue(false))); - - assert_eq!(last_event("n"), Error(SyntaxError(InvalidSyntax, 1, 2))); - assert_eq!(last_event("nul"), Error(SyntaxError(InvalidSyntax, 1, 4))); - assert_eq!(last_event("t"), Error(SyntaxError(InvalidSyntax, 1, 2))); - assert_eq!(last_event("truz"), Error(SyntaxError(InvalidSyntax, 1, 4))); - assert_eq!(last_event("f"), Error(SyntaxError(InvalidSyntax, 1, 2))); - assert_eq!(last_event("faz"), Error(SyntaxError(InvalidSyntax, 1, 3))); - } - - #[test] - fn test_stack() { - let mut stack = Stack::new(); - - assert!(stack.is_empty()); - assert!(stack.is_empty()); - assert!(!stack.last_is_index()); - - stack.push_index(0); - stack.bump_index(); - - assert!(stack.len() == 1); - assert!(stack.is_equal_to(&[StackElement::Index(1)])); - assert!(stack.starts_with(&[StackElement::Index(1)])); - assert!(stack.ends_with(&[StackElement::Index(1)])); - assert!(stack.last_is_index()); - assert!(stack.get(0) == StackElement::Index(1)); - - stack.push_key("foo".to_string()); - - 
assert!(stack.len() == 2); - assert!(stack.is_equal_to(&[StackElement::Index(1), StackElement::Key("foo")])); - assert!(stack.starts_with(&[StackElement::Index(1), StackElement::Key("foo")])); - assert!(stack.starts_with(&[StackElement::Index(1)])); - assert!(stack.ends_with(&[StackElement::Index(1), StackElement::Key("foo")])); - assert!(stack.ends_with(&[StackElement::Key("foo")])); - assert!(!stack.last_is_index()); - assert!(stack.get(0) == StackElement::Index(1)); - assert!(stack.get(1) == StackElement::Key("foo")); - - stack.push_key("bar".to_string()); - - assert!(stack.len() == 3); - assert!(stack.is_equal_to(&[StackElement::Index(1), - StackElement::Key("foo"), - StackElement::Key("bar")])); - assert!(stack.starts_with(&[StackElement::Index(1)])); - assert!(stack.starts_with(&[StackElement::Index(1), StackElement::Key("foo")])); - assert!(stack.starts_with(&[StackElement::Index(1), - StackElement::Key("foo"), - StackElement::Key("bar")])); - assert!(stack.ends_with(&[StackElement::Key("bar")])); - assert!(stack.ends_with(&[StackElement::Key("foo"), StackElement::Key("bar")])); - assert!(stack.ends_with(&[StackElement::Index(1), - StackElement::Key("foo"), - StackElement::Key("bar")])); - assert!(!stack.last_is_index()); - assert!(stack.get(0) == StackElement::Index(1)); - assert!(stack.get(1) == StackElement::Key("foo")); - assert!(stack.get(2) == StackElement::Key("bar")); - - stack.pop(); - - assert!(stack.len() == 2); - assert!(stack.is_equal_to(&[StackElement::Index(1), StackElement::Key("foo")])); - assert!(stack.starts_with(&[StackElement::Index(1), StackElement::Key("foo")])); - assert!(stack.starts_with(&[StackElement::Index(1)])); - assert!(stack.ends_with(&[StackElement::Index(1), StackElement::Key("foo")])); - assert!(stack.ends_with(&[StackElement::Key("foo")])); - assert!(!stack.last_is_index()); - assert!(stack.get(0) == StackElement::Index(1)); - assert!(stack.get(1) == StackElement::Key("foo")); - } - - #[test] - fn test_to_json() { - use std::collections::{HashMap,BTreeMap}; - use super::ToJson; - - let array2 = Array(vec!(U64(1), U64(2))); - let array3 = Array(vec!(U64(1), U64(2), U64(3))); - let object = { - let mut tree_map = BTreeMap::new(); - tree_map.insert("a".to_string(), U64(1)); - tree_map.insert("b".to_string(), U64(2)); - Object(tree_map) - }; - - assert_eq!(array2.to_json(), array2); - assert_eq!(object.to_json(), object); - assert_eq!(3_isize.to_json(), I64(3)); - assert_eq!(4_i8.to_json(), I64(4)); - assert_eq!(5_i16.to_json(), I64(5)); - assert_eq!(6_i32.to_json(), I64(6)); - assert_eq!(7_i64.to_json(), I64(7)); - assert_eq!(8_usize.to_json(), U64(8)); - assert_eq!(9_u8.to_json(), U64(9)); - assert_eq!(10_u16.to_json(), U64(10)); - assert_eq!(11_u32.to_json(), U64(11)); - assert_eq!(12_u64.to_json(), U64(12)); - assert_eq!(13.0_f32.to_json(), F64(13.0_f64)); - assert_eq!(14.0_f64.to_json(), F64(14.0_f64)); - assert_eq!(().to_json(), Null); - assert_eq!(f32::INFINITY.to_json(), Null); - assert_eq!(f64::NAN.to_json(), Null); - assert_eq!(true.to_json(), Boolean(true)); - assert_eq!(false.to_json(), Boolean(false)); - assert_eq!("abc".to_json(), String("abc".to_string())); - assert_eq!("abc".to_string().to_json(), String("abc".to_string())); - assert_eq!((1_usize, 2_usize).to_json(), array2); - assert_eq!((1_usize, 2_usize, 3_usize).to_json(), array3); - assert_eq!([1_usize, 2_usize].to_json(), array2); - assert_eq!((&[1_usize, 2_usize, 3_usize]).to_json(), array3); - assert_eq!((vec![1_usize, 2_usize]).to_json(), array2); - assert_eq!(vec!(1_usize, 
2_usize, 3_usize).to_json(), array3); - let mut tree_map = BTreeMap::new(); - tree_map.insert("a".to_string(), 1 as usize); - tree_map.insert("b".to_string(), 2); - assert_eq!(tree_map.to_json(), object); - let mut hash_map = HashMap::new(); - hash_map.insert("a".to_string(), 1 as usize); - hash_map.insert("b".to_string(), 2); - assert_eq!(hash_map.to_json(), object); - assert_eq!(Some(15).to_json(), I64(15)); - assert_eq!(Some(15 as usize).to_json(), U64(15)); - assert_eq!(None::.to_json(), Null); - } - - #[test] - fn test_encode_hashmap_with_arbitrary_key() { - use std::collections::HashMap; - #[derive(PartialEq, Eq, Hash, RustcEncodable)] - struct ArbitraryType(usize); - let mut hm: HashMap = HashMap::new(); - hm.insert(ArbitraryType(1), true); - let mut mem_buf = string::String::new(); - let mut encoder = Encoder::new(&mut mem_buf); - let result = hm.encode(&mut encoder); - match result.err().unwrap() { - EncoderError::BadHashmapKey => (), - _ => panic!("expected bad hash map key") - } - } - - #[bench] - fn bench_streaming_small(b: &mut Bencher) { - b.iter( || { - let mut parser = Parser::new( - r#"{ - "a": 1.0, - "b": [ - true, - "foo\nbar", - { "c": {"d": null} } - ] - }"#.chars() - ); - loop { - match parser.next() { - None => return, - _ => {} - } - } - }); - } - #[bench] - fn bench_small(b: &mut Bencher) { - b.iter( || { - let _ = from_str(r#"{ - "a": 1.0, - "b": [ - true, - "foo\nbar", - { "c": {"d": null} } - ] - }"#); - }); - } - - fn big_json() -> string::String { - let mut src = "[\n".to_string(); - for _ in 0..500 { - src.push_str(r#"{ "a": true, "b": null, "c":3.1415, "d": "Hello world", "e": \ - [1,2,3]},"#); - } - src.push_str("{}]"); - return src; - } - - #[bench] - fn bench_streaming_large(b: &mut Bencher) { - let src = big_json(); - b.iter( || { - let mut parser = Parser::new(src.chars()); - loop { - match parser.next() { - None => return, - _ => {} - } - } - }); - } - #[bench] - fn bench_large(b: &mut Bencher) { - let src = big_json(); - b.iter( || { let _ = from_str(&src); }); - } -} diff --git a/src/libserialize/lib.rs b/src/libserialize/lib.rs deleted file mode 100644 index 31790ce6290fe..0000000000000 --- a/src/libserialize/lib.rs +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! Support code for encoding and decoding types. - -/* -Core encoding and decoding interfaces. -*/ - -// Do not remove on snapshot creation. Needed for bootstrap. 
(Issue #22364) -#![cfg_attr(stage0, feature(custom_attribute))] -#![crate_name = "serialize"] -#![unstable(feature = "rustc_private", - reason = "deprecated in favor of rustc-serialize on crates.io")] -#![staged_api] -#![crate_type = "rlib"] -#![crate_type = "dylib"] -#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "http://doc.rust-lang.org/nightly/", - html_playground_url = "http://play.rust-lang.org/")] - -#![feature(box_syntax)] -#![feature(collections)] -#![feature(core)] -#![feature(rustc_private)] -#![feature(staged_api)] -#![feature(std_misc)] -#![feature(unicode)] -#![feature(str_char)] -#![cfg_attr(test, feature(test))] - -// test harness access -#[cfg(test)] extern crate test; -#[macro_use] extern crate log; - -extern crate rustc_unicode; -extern crate collections; - -pub use self::serialize::{Decoder, Encoder, Decodable, Encodable, - DecoderHelpers, EncoderHelpers}; - -mod serialize; -mod collection_impls; - -pub mod hex; -pub mod json; - -mod rustc_serialize { - pub use serialize::*; -} diff --git a/src/libserialize/serialize.rs b/src/libserialize/serialize.rs deleted file mode 100644 index af1387346106a..0000000000000 --- a/src/libserialize/serialize.rs +++ /dev/null @@ -1,637 +0,0 @@ -// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! Support code for encoding and decoding types. - -/* -Core encoding and decoding interfaces. 
-*/ - -use std::path; -use std::rc::Rc; -use std::cell::{Cell, RefCell}; -use std::sync::Arc; - -pub trait Encoder { - type Error; - - // Primitive types: - fn emit_nil(&mut self) -> Result<(), Self::Error>; - fn emit_uint(&mut self, v: usize) -> Result<(), Self::Error>; - fn emit_u64(&mut self, v: u64) -> Result<(), Self::Error>; - fn emit_u32(&mut self, v: u32) -> Result<(), Self::Error>; - fn emit_u16(&mut self, v: u16) -> Result<(), Self::Error>; - fn emit_u8(&mut self, v: u8) -> Result<(), Self::Error>; - fn emit_int(&mut self, v: isize) -> Result<(), Self::Error>; - fn emit_i64(&mut self, v: i64) -> Result<(), Self::Error>; - fn emit_i32(&mut self, v: i32) -> Result<(), Self::Error>; - fn emit_i16(&mut self, v: i16) -> Result<(), Self::Error>; - fn emit_i8(&mut self, v: i8) -> Result<(), Self::Error>; - fn emit_bool(&mut self, v: bool) -> Result<(), Self::Error>; - fn emit_f64(&mut self, v: f64) -> Result<(), Self::Error>; - fn emit_f32(&mut self, v: f32) -> Result<(), Self::Error>; - fn emit_char(&mut self, v: char) -> Result<(), Self::Error>; - fn emit_str(&mut self, v: &str) -> Result<(), Self::Error>; - - // Compound types: - fn emit_enum(&mut self, name: &str, f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; - - fn emit_enum_variant(&mut self, v_name: &str, - v_id: usize, - len: usize, - f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; - fn emit_enum_variant_arg(&mut self, a_idx: usize, f: F) - -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; - - fn emit_enum_struct_variant(&mut self, v_name: &str, - v_id: usize, - len: usize, - f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; - fn emit_enum_struct_variant_field(&mut self, - f_name: &str, - f_idx: usize, - f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; - - fn emit_struct(&mut self, name: &str, len: usize, f: F) - -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; - fn emit_struct_field(&mut self, f_name: &str, f_idx: usize, f: F) - -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; - - fn emit_tuple(&mut self, len: usize, f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; - fn emit_tuple_arg(&mut self, idx: usize, f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; - - fn emit_tuple_struct(&mut self, name: &str, len: usize, f: F) - -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; - fn emit_tuple_struct_arg(&mut self, f_idx: usize, f: F) - -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; - - // Specialized types: - fn emit_option(&mut self, f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; - fn emit_option_none(&mut self) -> Result<(), Self::Error>; - fn emit_option_some(&mut self, f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; - - fn emit_seq(&mut self, len: usize, f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; - fn emit_seq_elt(&mut self, idx: usize, f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; - - fn emit_map(&mut self, len: usize, f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; - fn emit_map_elt_key(&mut self, 
idx: usize, f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; - fn emit_map_elt_val(&mut self, idx: usize, f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; -} - -pub trait Decoder { - type Error; - - // Primitive types: - fn read_nil(&mut self) -> Result<(), Self::Error>; - fn read_uint(&mut self) -> Result; - fn read_u64(&mut self) -> Result; - fn read_u32(&mut self) -> Result; - fn read_u16(&mut self) -> Result; - fn read_u8(&mut self) -> Result; - fn read_int(&mut self) -> Result; - fn read_i64(&mut self) -> Result; - fn read_i32(&mut self) -> Result; - fn read_i16(&mut self) -> Result; - fn read_i8(&mut self) -> Result; - fn read_bool(&mut self) -> Result; - fn read_f64(&mut self) -> Result; - fn read_f32(&mut self) -> Result; - fn read_char(&mut self) -> Result; - fn read_str(&mut self) -> Result; - - // Compound types: - fn read_enum(&mut self, name: &str, f: F) -> Result - where F: FnOnce(&mut Self) -> Result; - - fn read_enum_variant(&mut self, names: &[&str], f: F) - -> Result - where F: FnMut(&mut Self, usize) -> Result; - fn read_enum_variant_arg(&mut self, a_idx: usize, f: F) - -> Result - where F: FnOnce(&mut Self) -> Result; - - fn read_enum_struct_variant(&mut self, names: &[&str], f: F) - -> Result - where F: FnMut(&mut Self, usize) -> Result; - fn read_enum_struct_variant_field(&mut self, - &f_name: &str, - f_idx: usize, - f: F) - -> Result - where F: FnOnce(&mut Self) -> Result; - - fn read_struct(&mut self, s_name: &str, len: usize, f: F) - -> Result - where F: FnOnce(&mut Self) -> Result; - fn read_struct_field(&mut self, - f_name: &str, - f_idx: usize, - f: F) - -> Result - where F: FnOnce(&mut Self) -> Result; - - fn read_tuple(&mut self, len: usize, f: F) -> Result - where F: FnOnce(&mut Self) -> Result; - fn read_tuple_arg(&mut self, a_idx: usize, f: F) - -> Result - where F: FnOnce(&mut Self) -> Result; - - fn read_tuple_struct(&mut self, s_name: &str, len: usize, f: F) - -> Result - where F: FnOnce(&mut Self) -> Result; - fn read_tuple_struct_arg(&mut self, a_idx: usize, f: F) - -> Result - where F: FnOnce(&mut Self) -> Result; - - // Specialized types: - fn read_option(&mut self, f: F) -> Result - where F: FnMut(&mut Self, bool) -> Result; - - fn read_seq(&mut self, f: F) -> Result - where F: FnOnce(&mut Self, usize) -> Result; - fn read_seq_elt(&mut self, idx: usize, f: F) -> Result - where F: FnOnce(&mut Self) -> Result; - - fn read_map(&mut self, f: F) -> Result - where F: FnOnce(&mut Self, usize) -> Result; - fn read_map_elt_key(&mut self, idx: usize, f: F) - -> Result - where F: FnOnce(&mut Self) -> Result; - fn read_map_elt_val(&mut self, idx: usize, f: F) - -> Result - where F: FnOnce(&mut Self) -> Result; - - // Failure - fn error(&mut self, err: &str) -> Self::Error; -} - -pub trait Encodable { - fn encode(&self, s: &mut S) -> Result<(), S::Error>; -} - -pub trait Decodable { - fn decode(d: &mut D) -> Result; -} - -impl Encodable for usize { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_uint(*self) - } -} - -impl Decodable for usize { - fn decode(d: &mut D) -> Result { - d.read_uint() - } -} - -impl Encodable for u8 { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_u8(*self) - } -} - -impl Decodable for u8 { - fn decode(d: &mut D) -> Result { - d.read_u8() - } -} - -impl Encodable for u16 { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_u16(*self) - } -} - -impl Decodable for u16 { - fn decode(d: &mut D) -> Result { - 
d.read_u16() - } -} - -impl Encodable for u32 { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_u32(*self) - } -} - -impl Decodable for u32 { - fn decode(d: &mut D) -> Result { - d.read_u32() - } -} - -impl Encodable for u64 { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_u64(*self) - } -} - -impl Decodable for u64 { - fn decode(d: &mut D) -> Result { - d.read_u64() - } -} - -impl Encodable for isize { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_int(*self) - } -} - -impl Decodable for isize { - fn decode(d: &mut D) -> Result { - d.read_int() - } -} - -impl Encodable for i8 { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_i8(*self) - } -} - -impl Decodable for i8 { - fn decode(d: &mut D) -> Result { - d.read_i8() - } -} - -impl Encodable for i16 { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_i16(*self) - } -} - -impl Decodable for i16 { - fn decode(d: &mut D) -> Result { - d.read_i16() - } -} - -impl Encodable for i32 { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_i32(*self) - } -} - -impl Decodable for i32 { - fn decode(d: &mut D) -> Result { - d.read_i32() - } -} - -impl Encodable for i64 { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_i64(*self) - } -} - -impl Decodable for i64 { - fn decode(d: &mut D) -> Result { - d.read_i64() - } -} - -impl Encodable for str { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_str(self) - } -} - -impl Encodable for String { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_str(&self[..]) - } -} - -impl Decodable for String { - fn decode(d: &mut D) -> Result { - d.read_str() - } -} - -impl Encodable for f32 { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_f32(*self) - } -} - -impl Decodable for f32 { - fn decode(d: &mut D) -> Result { - d.read_f32() - } -} - -impl Encodable for f64 { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_f64(*self) - } -} - -impl Decodable for f64 { - fn decode(d: &mut D) -> Result { - d.read_f64() - } -} - -impl Encodable for bool { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_bool(*self) - } -} - -impl Decodable for bool { - fn decode(d: &mut D) -> Result { - d.read_bool() - } -} - -impl Encodable for char { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_char(*self) - } -} - -impl Decodable for char { - fn decode(d: &mut D) -> Result { - d.read_char() - } -} - -impl Encodable for () { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_nil() - } -} - -impl Decodable for () { - fn decode(d: &mut D) -> Result<(), D::Error> { - d.read_nil() - } -} - -impl<'a, T: ?Sized + Encodable> Encodable for &'a T { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - (**self).encode(s) - } -} - -impl Encodable for Box { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - (**self).encode(s) - } -} - -impl< T: Decodable> Decodable for Box { - fn decode(d: &mut D) -> Result, D::Error> { - Ok(box try!(Decodable::decode(d))) - } -} - -impl< T: Decodable> Decodable for Box<[T]> { - fn decode(d: &mut D) -> Result, D::Error> { - let v: Vec = try!(Decodable::decode(d)); - Ok(v.into_boxed_slice()) - } -} - -impl Encodable for Rc { - #[inline] - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - (**self).encode(s) - } -} - -impl Decodable for Rc { - #[inline] - fn decode(d: &mut D) -> Result, D::Error> { - Ok(Rc::new(try!(Decodable::decode(d)))) - } -} - -impl Encodable for [T] { - fn 
encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_seq(self.len(), |s| { - for (i, e) in self.iter().enumerate() { - try!(s.emit_seq_elt(i, |s| e.encode(s))) - } - Ok(()) - }) - } -} - -impl Encodable for Vec { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_seq(self.len(), |s| { - for (i, e) in self.iter().enumerate() { - try!(s.emit_seq_elt(i, |s| e.encode(s))) - } - Ok(()) - }) - } -} - -impl Decodable for Vec { - fn decode(d: &mut D) -> Result, D::Error> { - d.read_seq(|d, len| { - let mut v = Vec::with_capacity(len); - for i in 0..len { - v.push(try!(d.read_seq_elt(i, |d| Decodable::decode(d)))); - } - Ok(v) - }) - } -} - -impl Encodable for Option { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_option(|s| { - match *self { - None => s.emit_option_none(), - Some(ref v) => s.emit_option_some(|s| v.encode(s)), - } - }) - } -} - -impl Decodable for Option { - fn decode(d: &mut D) -> Result, D::Error> { - d.read_option(|d, b| { - if b { - Ok(Some(try!(Decodable::decode(d)))) - } else { - Ok(None) - } - }) - } -} - -macro_rules! peel { - ($name:ident, $($other:ident,)*) => (tuple! { $($other,)* }) -} - -/// Evaluates to the number of identifiers passed to it, for example: `count_idents!(a, b, c) == 3 -macro_rules! count_idents { - () => { 0 }; - ($_i:ident, $($rest:ident,)*) => { 1 + count_idents!($($rest,)*) } -} - -macro_rules! tuple { - () => (); - ( $($name:ident,)+ ) => ( - impl<$($name:Decodable),*> Decodable for ($($name,)*) { - #[allow(non_snake_case)] - fn decode(d: &mut D) -> Result<($($name,)*), D::Error> { - let len: usize = count_idents!($($name,)*); - d.read_tuple(len, |d| { - let mut i = 0; - let ret = ($(try!(d.read_tuple_arg({ i+=1; i-1 }, - |d| -> Result<$name,D::Error> { - Decodable::decode(d) - })),)*); - return Ok(ret); - }) - } - } - impl<$($name:Encodable),*> Encodable for ($($name,)*) { - #[allow(non_snake_case)] - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - let ($(ref $name,)*) = *self; - let mut n = 0; - $(let $name = $name; n += 1;)* - s.emit_tuple(n, |s| { - let mut i = 0; - $(try!(s.emit_tuple_arg({ i+=1; i-1 }, |s| $name.encode(s)));)* - Ok(()) - }) - } - } - peel! { $($name,)* } - ) -} - -tuple! { T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, } - -impl Encodable for path::PathBuf { - fn encode(&self, e: &mut S) -> Result<(), S::Error> { - self.to_str().unwrap().encode(e) - } -} - -impl Decodable for path::PathBuf { - fn decode(d: &mut D) -> Result { - let bytes: String = try!(Decodable::decode(d)); - Ok(path::PathBuf::from(bytes)) - } -} - -impl Encodable for Cell { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - self.get().encode(s) - } -} - -impl Decodable for Cell { - fn decode(d: &mut D) -> Result, D::Error> { - Ok(Cell::new(try!(Decodable::decode(d)))) - } -} - -// FIXME: #15036 -// Should use `try_borrow`, returning a -// `encoder.error("attempting to Encode borrowed RefCell")` -// from `encode` when `try_borrow` returns `None`. 
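// A minimal sketch of the `try_borrow`-based encoding that the FIXME above
// describes; illustrative only, not part of the original file. It assumes a
// `try_borrow` that returns `Option<Ref<T>>` and a hypothetical `error`
// method on `Encoder` (only `Decoder` defines one in this file).
//
//     impl<T: Encodable> Encodable for RefCell<T> {
//         fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
//             match self.try_borrow() {
//                 Some(inner) => inner.encode(s),
//                 None => Err(s.error("attempting to Encode borrowed RefCell")),
//             }
//         }
//     }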
- -impl Encodable for RefCell { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - self.borrow().encode(s) - } -} - -impl Decodable for RefCell { - fn decode(d: &mut D) -> Result, D::Error> { - Ok(RefCell::new(try!(Decodable::decode(d)))) - } -} - -impl Encodable for Arc { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - (**self).encode(s) - } -} - -impl Decodable for Arc { - fn decode(d: &mut D) -> Result, D::Error> { - Ok(Arc::new(try!(Decodable::decode(d)))) - } -} - -// ___________________________________________________________________________ -// Helper routines - -pub trait EncoderHelpers: Encoder { - fn emit_from_vec(&mut self, v: &[T], f: F) - -> Result<(), Self::Error> - where F: FnMut(&mut Self, &T) -> Result<(), Self::Error>; -} - -impl EncoderHelpers for S { - fn emit_from_vec(&mut self, v: &[T], mut f: F) -> Result<(), S::Error> where - F: FnMut(&mut S, &T) -> Result<(), S::Error>, - { - self.emit_seq(v.len(), |this| { - for (i, e) in v.iter().enumerate() { - try!(this.emit_seq_elt(i, |this| { - f(this, e) - })); - } - Ok(()) - }) - } -} - -pub trait DecoderHelpers: Decoder { - fn read_to_vec(&mut self, f: F) - -> Result, Self::Error> where - F: FnMut(&mut Self) -> Result; -} - -impl DecoderHelpers for D { - fn read_to_vec(&mut self, mut f: F) -> Result, D::Error> where F: - FnMut(&mut D) -> Result, - { - self.read_seq(|this, len| { - let mut v = Vec::with_capacity(len); - for i in 0..len { - v.push(try!(this.read_seq_elt(i, |this| f(this)))); - } - Ok(v) - }) - } -} diff --git a/src/libterm/lib.rs b/src/libterm/lib.rs deleted file mode 100644 index ec1426e6e48ce..0000000000000 --- a/src/libterm/lib.rs +++ /dev/null @@ -1,256 +0,0 @@ -// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! Terminal formatting library. -//! -//! This crate provides the `Terminal` trait, which abstracts over an [ANSI -//! Terminal][ansi] to provide color printing, among other things. There are two implementations, -//! the `TerminfoTerminal`, which uses control characters from a -//! [terminfo][ti] database, and `WinConsole`, which uses the [Win32 Console -//! API][win]. -//! -//! # Examples -//! -//! ```no_run -//! # #![feature(rustc_private)] -//! extern crate term; -//! -//! use std::io::prelude::*; -//! -//! fn main() { -//! let mut t = term::stdout().unwrap(); -//! -//! t.fg(term::color::GREEN).unwrap(); -//! (write!(t, "hello, ")).unwrap(); -//! -//! t.fg(term::color::RED).unwrap(); -//! (writeln!(t, "world!")).unwrap(); -//! -//! t.reset().unwrap(); -//! } -//! ``` -//! -//! [ansi]: https://en.wikipedia.org/wiki/ANSI_escape_code -//! [win]: http://msdn.microsoft.com/en-us/library/windows/desktop/ms682010%28v=vs.85%29.aspx -//! [ti]: https://en.wikipedia.org/wiki/Terminfo - -// Do not remove on snapshot creation. Needed for bootstrap. 
(Issue #22364) -#![cfg_attr(stage0, feature(custom_attribute))] -#![crate_name = "term"] -#![unstable(feature = "rustc_private", - reason = "use the crates.io `term` library instead")] -#![staged_api] -#![crate_type = "rlib"] -#![crate_type = "dylib"] -#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "http://doc.rust-lang.org/nightly/", - html_playground_url = "http://play.rust-lang.org/")] -#![deny(missing_docs)] - -#![feature(box_syntax)] -#![feature(collections)] -#![feature(rustc_private)] -#![feature(staged_api)] -#![feature(std_misc)] -#![feature(str_char)] -#![feature(path_ext)] -#![cfg_attr(windows, feature(libc))] - -#[macro_use] extern crate log; - -pub use terminfo::TerminfoTerminal; -#[cfg(windows)] -pub use win::WinConsole; - -use std::io::prelude::*; -use std::io; - -pub mod terminfo; - -#[cfg(windows)] -mod win; - -/// A hack to work around the fact that `Box` does not -/// currently implement `Write`. -pub struct WriterWrapper { - wrapped: Box, -} - -impl Write for WriterWrapper { - #[inline] - fn write(&mut self, buf: &[u8]) -> io::Result { - self.wrapped.write(buf) - } - - #[inline] - fn flush(&mut self) -> io::Result<()> { - self.wrapped.flush() - } -} - -#[cfg(not(windows))] -/// Return a Terminal wrapping stdout, or None if a terminal couldn't be -/// opened. -pub fn stdout() -> Option + Send>> { - TerminfoTerminal::new(WriterWrapper { - wrapped: box std::io::stdout(), - }) -} - -#[cfg(windows)] -/// Return a Terminal wrapping stdout, or None if a terminal couldn't be -/// opened. -pub fn stdout() -> Option + Send>> { - let ti = TerminfoTerminal::new(WriterWrapper { - wrapped: box std::io::stdout(), - }); - - match ti { - Some(t) => Some(t), - None => { - WinConsole::new(WriterWrapper { - wrapped: box std::io::stdout(), - }) - } - } -} - -#[cfg(not(windows))] -/// Return a Terminal wrapping stderr, or None if a terminal couldn't be -/// opened. -pub fn stderr() -> Option + Send>> { - TerminfoTerminal::new(WriterWrapper { - wrapped: box std::io::stderr(), - }) -} - -#[cfg(windows)] -/// Return a Terminal wrapping stderr, or None if a terminal couldn't be -/// opened. -pub fn stderr() -> Option + Send>> { - let ti = TerminfoTerminal::new(WriterWrapper { - wrapped: box std::io::stderr(), - }); - - match ti { - Some(t) => Some(t), - None => { - WinConsole::new(WriterWrapper { - wrapped: box std::io::stderr(), - }) - } - } -} - - -/// Terminal color definitions -pub mod color { - /// Number for a terminal color - pub type Color = u16; - - pub const BLACK: Color = 0; - pub const RED: Color = 1; - pub const GREEN: Color = 2; - pub const YELLOW: Color = 3; - pub const BLUE: Color = 4; - pub const MAGENTA: Color = 5; - pub const CYAN: Color = 6; - pub const WHITE: Color = 7; - - pub const BRIGHT_BLACK: Color = 8; - pub const BRIGHT_RED: Color = 9; - pub const BRIGHT_GREEN: Color = 10; - pub const BRIGHT_YELLOW: Color = 11; - pub const BRIGHT_BLUE: Color = 12; - pub const BRIGHT_MAGENTA: Color = 13; - pub const BRIGHT_CYAN: Color = 14; - pub const BRIGHT_WHITE: Color = 15; -} - -/// Terminal attributes -pub mod attr { - pub use self::Attr::*; - - /// Terminal attributes for use with term.attr(). - /// - /// Most attributes can only be turned on and must be turned off with term.reset(). - /// The ones that can be turned off explicitly take a boolean value. - /// Color is also represented as an attribute for convenience. 
- #[derive(Copy, Clone)] - pub enum Attr { - /// Bold (or possibly bright) mode - Bold, - /// Dim mode, also called faint or half-bright. Often not supported - Dim, - /// Italics mode. Often not supported - Italic(bool), - /// Underline mode - Underline(bool), - /// Blink mode - Blink, - /// Standout mode. Often implemented as Reverse, sometimes coupled with Bold - Standout(bool), - /// Reverse mode, inverts the foreground and background colors - Reverse, - /// Secure mode, also called invis mode. Hides the printed text - Secure, - /// Convenience attribute to set the foreground color - ForegroundColor(super::color::Color), - /// Convenience attribute to set the background color - BackgroundColor(super::color::Color) - } -} - -/// A terminal with similar capabilities to an ANSI Terminal -/// (foreground/background colors etc). -pub trait Terminal: Write { - /// Sets the foreground color to the given color. - /// - /// If the color is a bright color, but the terminal only supports 8 colors, - /// the corresponding normal color will be used instead. - /// - /// Returns `Ok(true)` if the color was set, `Ok(false)` otherwise, and `Err(e)` - /// if there was an I/O error. - fn fg(&mut self, color: color::Color) -> io::Result; - - /// Sets the background color to the given color. - /// - /// If the color is a bright color, but the terminal only supports 8 colors, - /// the corresponding normal color will be used instead. - /// - /// Returns `Ok(true)` if the color was set, `Ok(false)` otherwise, and `Err(e)` - /// if there was an I/O error. - fn bg(&mut self, color: color::Color) -> io::Result; - - /// Sets the given terminal attribute, if supported. Returns `Ok(true)` - /// if the attribute was supported, `Ok(false)` otherwise, and `Err(e)` if - /// there was an I/O error. - fn attr(&mut self, attr: attr::Attr) -> io::Result; - - /// Returns whether the given terminal attribute is supported. - fn supports_attr(&self, attr: attr::Attr) -> bool; - - /// Resets all terminal attributes and color to the default. - /// Returns `Ok()`. - fn reset(&mut self) -> io::Result<()>; - - /// Gets an immutable reference to the stream inside - fn get_ref<'a>(&'a self) -> &'a T; - - /// Gets a mutable reference to the stream inside - fn get_mut<'a>(&'a mut self) -> &'a mut T; -} - -/// A terminal which can be unwrapped. -pub trait UnwrappableTerminal: Terminal { - /// Returns the contained stream, destroying the `Terminal` - fn unwrap(self) -> T; -} diff --git a/src/libterm/terminfo/mod.rs b/src/libterm/terminfo/mod.rs deleted file mode 100644 index 4840cd1fddadf..0000000000000 --- a/src/libterm/terminfo/mod.rs +++ /dev/null @@ -1,235 +0,0 @@ -// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! Terminfo database interface. - -use std::collections::HashMap; -use std::env; -use std::io::prelude::*; -use std::io; - -use attr; -use color; -use Terminal; -use UnwrappableTerminal; -use self::searcher::open; -use self::parser::compiled::{parse, msys_terminfo}; -use self::parm::{expand, Number, Variables}; - - -/// A parsed terminfo database entry. 
-#[derive(Debug)] -pub struct TermInfo { - /// Names for the terminal - pub names: Vec , - /// Map of capability name to boolean value - pub bools: HashMap, - /// Map of capability name to numeric value - pub numbers: HashMap, - /// Map of capability name to raw (unexpanded) string - pub strings: HashMap > -} - -pub mod searcher; - -/// TermInfo format parsing. -pub mod parser { - //! ncurses-compatible compiled terminfo format parsing (term(5)) - pub mod compiled; -} -pub mod parm; - - -fn cap_for_attr(attr: attr::Attr) -> &'static str { - match attr { - attr::Bold => "bold", - attr::Dim => "dim", - attr::Italic(true) => "sitm", - attr::Italic(false) => "ritm", - attr::Underline(true) => "smul", - attr::Underline(false) => "rmul", - attr::Blink => "blink", - attr::Standout(true) => "smso", - attr::Standout(false) => "rmso", - attr::Reverse => "rev", - attr::Secure => "invis", - attr::ForegroundColor(_) => "setaf", - attr::BackgroundColor(_) => "setab" - } -} - -/// A Terminal that knows how many colors it supports, with a reference to its -/// parsed Terminfo database record. -pub struct TerminfoTerminal { - num_colors: u16, - out: T, - ti: Box -} - -impl Terminal for TerminfoTerminal { - fn fg(&mut self, color: color::Color) -> io::Result { - let color = self.dim_if_necessary(color); - if self.num_colors > color { - let s = expand(self.ti - .strings - .get("setaf") - .unwrap() - , - &[Number(color as isize)], &mut Variables::new()); - if s.is_ok() { - try!(self.out.write_all(&s.unwrap())); - return Ok(true) - } - } - Ok(false) - } - - fn bg(&mut self, color: color::Color) -> io::Result { - let color = self.dim_if_necessary(color); - if self.num_colors > color { - let s = expand(self.ti - .strings - .get("setab") - .unwrap() - , - &[Number(color as isize)], &mut Variables::new()); - if s.is_ok() { - try!(self.out.write_all(&s.unwrap())); - return Ok(true) - } - } - Ok(false) - } - - fn attr(&mut self, attr: attr::Attr) -> io::Result { - match attr { - attr::ForegroundColor(c) => self.fg(c), - attr::BackgroundColor(c) => self.bg(c), - _ => { - let cap = cap_for_attr(attr); - let parm = self.ti.strings.get(cap); - if parm.is_some() { - let s = expand(parm.unwrap(), - &[], - &mut Variables::new()); - if s.is_ok() { - try!(self.out.write_all(&s.unwrap())); - return Ok(true) - } - } - Ok(false) - } - } - } - - fn supports_attr(&self, attr: attr::Attr) -> bool { - match attr { - attr::ForegroundColor(_) | attr::BackgroundColor(_) => { - self.num_colors > 0 - } - _ => { - let cap = cap_for_attr(attr); - self.ti.strings.get(cap).is_some() - } - } - } - - fn reset(&mut self) -> io::Result<()> { - let mut cap = self.ti.strings.get("sgr0"); - if cap.is_none() { - // are there any terminals that have color/attrs and not sgr0? - // Try falling back to sgr, then op - cap = self.ti.strings.get("sgr"); - if cap.is_none() { - cap = self.ti.strings.get("op"); - } - } - let s = cap.map_or(Err("can't find terminfo capability `sgr0`".to_string()), |op| { - expand(op, &[], &mut Variables::new()) - }); - if s.is_ok() { - return self.out.write_all(&s.unwrap()) - } - Ok(()) - } - - fn get_ref<'a>(&'a self) -> &'a T { &self.out } - - fn get_mut<'a>(&'a mut self) -> &'a mut T { &mut self.out } -} - -impl UnwrappableTerminal for TerminfoTerminal { - fn unwrap(self) -> T { self.out } -} - -impl TerminfoTerminal { - /// Returns `None` whenever the terminal cannot be created for some - /// reason. - pub fn new(out: T) -> Option+Send+'static>> { - let term = match env::var("TERM") { - Ok(t) => t, - Err(..) 
=> { - debug!("TERM environment variable not defined"); - return None; - } - }; - - let mut file = match open(&term[..]) { - Ok(f) => f, - Err(err) => return match env::var("MSYSCON") { - Ok(ref val) if &val[..] == "mintty.exe" => { - // msys terminal - Some(box TerminfoTerminal{ - out: out, - ti: msys_terminfo(), - num_colors: 8, - }) - }, - _ => { - debug!("error finding terminfo entry: {:?}", err); - None - }, - }, - }; - - let ti = parse(&mut file, false); - if ti.is_err() { - debug!("error parsing terminfo entry: {:?}", ti.err().unwrap()); - return None; - } - - let inf = ti.unwrap(); - let nc = if inf.strings.get("setaf").is_some() - && inf.strings.get("setab").is_some() { - inf.numbers.get("colors").map_or(0, |&n| n) - } else { 0 }; - - return Some(box TerminfoTerminal {out: out, - ti: inf, - num_colors: nc}); - } - - fn dim_if_necessary(&self, color: color::Color) -> color::Color { - if color >= self.num_colors && color >= 8 && color < 16 { - color-8 - } else { color } - } -} - - -impl Write for TerminfoTerminal { - fn write(&mut self, buf: &[u8]) -> io::Result { - self.out.write(buf) - } - - fn flush(&mut self) -> io::Result<()> { - self.out.flush() - } -} diff --git a/src/libterm/terminfo/parm.rs b/src/libterm/terminfo/parm.rs deleted file mode 100644 index 2b8c24741ae7c..0000000000000 --- a/src/libterm/terminfo/parm.rs +++ /dev/null @@ -1,703 +0,0 @@ -// Copyright 2012 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! Parameterized string expansion - -pub use self::Param::*; -use self::States::*; -use self::FormatState::*; -use self::FormatOp::*; -use std::ascii::OwnedAsciiExt; -use std::mem::replace; -use std::iter::repeat; - -#[derive(Copy, Clone, PartialEq)] -enum States { - Nothing, - Percent, - SetVar, - GetVar, - PushParam, - CharConstant, - CharClose, - IntConstant(isize), - FormatPattern(Flags, FormatState), - SeekIfElse(isize), - SeekIfElsePercent(isize), - SeekIfEnd(isize), - SeekIfEndPercent(isize) -} - -#[derive(Copy, Clone, PartialEq)] -enum FormatState { - FormatStateFlags, - FormatStateWidth, - FormatStatePrecision -} - -/// Types of parameters a capability can use -#[allow(missing_docs)] -#[derive(Clone)] -pub enum Param { - Words(String), - Number(isize) -} - -/// Container for static and dynamic variable arrays -pub struct Variables { - /// Static variables A-Z - sta: [Param; 26], - /// Dynamic variables a-z - dyn: [Param; 26] -} - -impl Variables { - /// Return a new zero-initialized Variables - pub fn new() -> Variables { - Variables { - sta: [ - Number(0), Number(0), Number(0), Number(0), Number(0), - Number(0), Number(0), Number(0), Number(0), Number(0), - Number(0), Number(0), Number(0), Number(0), Number(0), - Number(0), Number(0), Number(0), Number(0), Number(0), - Number(0), Number(0), Number(0), Number(0), Number(0), - Number(0), - ], - dyn: [ - Number(0), Number(0), Number(0), Number(0), Number(0), - Number(0), Number(0), Number(0), Number(0), Number(0), - Number(0), Number(0), Number(0), Number(0), Number(0), - Number(0), Number(0), Number(0), Number(0), Number(0), - Number(0), Number(0), Number(0), Number(0), Number(0), - Number(0), - ], - } - } -} - -/// Expand a parameterized capability -/// -/// # Arguments -/// * `cap` - string to expand -/// * `params` - 
vector of params for %p1 etc -/// * `vars` - Variables struct for %Pa etc -/// -/// To be compatible with ncurses, `vars` should be the same between calls to `expand` for -/// multiple capabilities for the same terminal. -pub fn expand(cap: &[u8], params: &[Param], vars: &mut Variables) - -> Result , String> { - let mut state = Nothing; - - // expanded cap will only rarely be larger than the cap itself - let mut output = Vec::with_capacity(cap.len()); - - let mut stack: Vec = Vec::new(); - - // Copy parameters into a local vector for mutability - let mut mparams = [ - Number(0), Number(0), Number(0), Number(0), Number(0), - Number(0), Number(0), Number(0), Number(0), - ]; - for (dst, src) in mparams.iter_mut().zip(params.iter()) { - *dst = (*src).clone(); - } - - for &c in cap { - let cur = c as char; - let mut old_state = state; - match state { - Nothing => { - if cur == '%' { - state = Percent; - } else { - output.push(c); - } - }, - Percent => { - match cur { - '%' => { output.push(c); state = Nothing }, - 'c' => if !stack.is_empty() { - match stack.pop().unwrap() { - // if c is 0, use 0200 (128) for ncurses compatibility - Number(c) => { - output.push(if c == 0 { - 128 - } else { - c as u8 - }) - } - _ => return Err("a non-char was used with %c".to_string()) - } - } else { return Err("stack is empty".to_string()) }, - 'p' => state = PushParam, - 'P' => state = SetVar, - 'g' => state = GetVar, - '\'' => state = CharConstant, - '{' => state = IntConstant(0), - 'l' => if !stack.is_empty() { - match stack.pop().unwrap() { - Words(s) => stack.push(Number(s.len() as isize)), - _ => return Err("a non-str was used with %l".to_string()) - } - } else { return Err("stack is empty".to_string()) }, - '+' => if stack.len() > 1 { - match (stack.pop().unwrap(), stack.pop().unwrap()) { - (Number(y), Number(x)) => stack.push(Number(x + y)), - _ => return Err("non-numbers on stack with +".to_string()) - } - } else { return Err("stack is empty".to_string()) }, - '-' => if stack.len() > 1 { - match (stack.pop().unwrap(), stack.pop().unwrap()) { - (Number(y), Number(x)) => stack.push(Number(x - y)), - _ => return Err("non-numbers on stack with -".to_string()) - } - } else { return Err("stack is empty".to_string()) }, - '*' => if stack.len() > 1 { - match (stack.pop().unwrap(), stack.pop().unwrap()) { - (Number(y), Number(x)) => stack.push(Number(x * y)), - _ => return Err("non-numbers on stack with *".to_string()) - } - } else { return Err("stack is empty".to_string()) }, - '/' => if stack.len() > 1 { - match (stack.pop().unwrap(), stack.pop().unwrap()) { - (Number(y), Number(x)) => stack.push(Number(x / y)), - _ => return Err("non-numbers on stack with /".to_string()) - } - } else { return Err("stack is empty".to_string()) }, - 'm' => if stack.len() > 1 { - match (stack.pop().unwrap(), stack.pop().unwrap()) { - (Number(y), Number(x)) => stack.push(Number(x % y)), - _ => return Err("non-numbers on stack with %".to_string()) - } - } else { return Err("stack is empty".to_string()) }, - '&' => if stack.len() > 1 { - match (stack.pop().unwrap(), stack.pop().unwrap()) { - (Number(y), Number(x)) => stack.push(Number(x & y)), - _ => return Err("non-numbers on stack with &".to_string()) - } - } else { return Err("stack is empty".to_string()) }, - '|' => if stack.len() > 1 { - match (stack.pop().unwrap(), stack.pop().unwrap()) { - (Number(y), Number(x)) => stack.push(Number(x | y)), - _ => return Err("non-numbers on stack with |".to_string()) - } - } else { return Err("stack is empty".to_string()) }, - '^' => if 
stack.len() > 1 { - match (stack.pop().unwrap(), stack.pop().unwrap()) { - (Number(y), Number(x)) => stack.push(Number(x ^ y)), - _ => return Err("non-numbers on stack with ^".to_string()) - } - } else { return Err("stack is empty".to_string()) }, - '=' => if stack.len() > 1 { - match (stack.pop().unwrap(), stack.pop().unwrap()) { - (Number(y), Number(x)) => stack.push(Number(if x == y { 1 } - else { 0 })), - _ => return Err("non-numbers on stack with =".to_string()) - } - } else { return Err("stack is empty".to_string()) }, - '>' => if stack.len() > 1 { - match (stack.pop().unwrap(), stack.pop().unwrap()) { - (Number(y), Number(x)) => stack.push(Number(if x > y { 1 } - else { 0 })), - _ => return Err("non-numbers on stack with >".to_string()) - } - } else { return Err("stack is empty".to_string()) }, - '<' => if stack.len() > 1 { - match (stack.pop().unwrap(), stack.pop().unwrap()) { - (Number(y), Number(x)) => stack.push(Number(if x < y { 1 } - else { 0 })), - _ => return Err("non-numbers on stack with <".to_string()) - } - } else { return Err("stack is empty".to_string()) }, - 'A' => if stack.len() > 1 { - match (stack.pop().unwrap(), stack.pop().unwrap()) { - (Number(0), Number(_)) => stack.push(Number(0)), - (Number(_), Number(0)) => stack.push(Number(0)), - (Number(_), Number(_)) => stack.push(Number(1)), - _ => return Err("non-numbers on stack with logical and".to_string()) - } - } else { return Err("stack is empty".to_string()) }, - 'O' => if stack.len() > 1 { - match (stack.pop().unwrap(), stack.pop().unwrap()) { - (Number(0), Number(0)) => stack.push(Number(0)), - (Number(_), Number(_)) => stack.push(Number(1)), - _ => return Err("non-numbers on stack with logical or".to_string()) - } - } else { return Err("stack is empty".to_string()) }, - '!' => if !stack.is_empty() { - match stack.pop().unwrap() { - Number(0) => stack.push(Number(1)), - Number(_) => stack.push(Number(0)), - _ => return Err("non-number on stack with logical not".to_string()) - } - } else { return Err("stack is empty".to_string()) }, - '~' => if !stack.is_empty() { - match stack.pop().unwrap() { - Number(x) => stack.push(Number(!x)), - _ => return Err("non-number on stack with %~".to_string()) - } - } else { return Err("stack is empty".to_string()) }, - 'i' => match (mparams[0].clone(), mparams[1].clone()) { - (Number(x), Number(y)) => { - mparams[0] = Number(x+1); - mparams[1] = Number(y+1); - }, - (_, _) => return Err("first two params not numbers with %i".to_string()) - }, - - // printf-style support for %doxXs - 'd'|'o'|'x'|'X'|'s' => if !stack.is_empty() { - let flags = Flags::new(); - let res = format(stack.pop().unwrap(), FormatOp::from_char(cur), flags); - if res.is_err() { return res } - output.push_all(&res.unwrap()) - } else { return Err("stack is empty".to_string()) }, - ':'|'#'|' '|'.'|'0'...'9' => { - let mut flags = Flags::new(); - let mut fstate = FormatStateFlags; - match cur { - ':' => (), - '#' => flags.alternate = true, - ' ' => flags.space = true, - '.' => fstate = FormatStatePrecision, - '0'...'9' => { - flags.width = cur as usize - '0' as usize; - fstate = FormatStateWidth; - } - _ => unreachable!() - } - state = FormatPattern(flags, fstate); - } - - // conditionals - '?' 
=> (), - 't' => if !stack.is_empty() { - match stack.pop().unwrap() { - Number(0) => state = SeekIfElse(0), - Number(_) => (), - _ => return Err("non-number on stack \ - with conditional".to_string()) - } - } else { return Err("stack is empty".to_string()) }, - 'e' => state = SeekIfEnd(0), - ';' => (), - - _ => { - return Err(format!("unrecognized format option {:?}", cur)) - } - } - }, - PushParam => { - // params are 1-indexed - stack.push(mparams[match cur.to_digit(10) { - Some(d) => d as usize - 1, - None => return Err("bad param number".to_string()) - }].clone()); - }, - SetVar => { - if cur >= 'A' && cur <= 'Z' { - if !stack.is_empty() { - let idx = (cur as u8) - b'A'; - vars.sta[idx as usize] = stack.pop().unwrap(); - } else { return Err("stack is empty".to_string()) } - } else if cur >= 'a' && cur <= 'z' { - if !stack.is_empty() { - let idx = (cur as u8) - b'a'; - vars.dyn[idx as usize] = stack.pop().unwrap(); - } else { return Err("stack is empty".to_string()) } - } else { - return Err("bad variable name in %P".to_string()); - } - }, - GetVar => { - if cur >= 'A' && cur <= 'Z' { - let idx = (cur as u8) - b'A'; - stack.push(vars.sta[idx as usize].clone()); - } else if cur >= 'a' && cur <= 'z' { - let idx = (cur as u8) - b'a'; - stack.push(vars.dyn[idx as usize].clone()); - } else { - return Err("bad variable name in %g".to_string()); - } - }, - CharConstant => { - stack.push(Number(c as isize)); - state = CharClose; - }, - CharClose => { - if cur != '\'' { - return Err("malformed character constant".to_string()); - } - }, - IntConstant(i) => { - match cur { - '}' => { - stack.push(Number(i)); - state = Nothing; - } - '0'...'9' => { - state = IntConstant(i*10 + (cur as isize - '0' as isize)); - old_state = Nothing; - } - _ => return Err("bad isize constant".to_string()) - } - } - FormatPattern(ref mut flags, ref mut fstate) => { - old_state = Nothing; - match (*fstate, cur) { - (_,'d')|(_,'o')|(_,'x')|(_,'X')|(_,'s') => if !stack.is_empty() { - let res = format(stack.pop().unwrap(), FormatOp::from_char(cur), *flags); - if res.is_err() { return res } - output.push_all(&res.unwrap()); - // will cause state to go to Nothing - old_state = FormatPattern(*flags, *fstate); - } else { return Err("stack is empty".to_string()) }, - (FormatStateFlags,'#') => { - flags.alternate = true; - } - (FormatStateFlags,'-') => { - flags.left = true; - } - (FormatStateFlags,'+') => { - flags.sign = true; - } - (FormatStateFlags,' ') => { - flags.space = true; - } - (FormatStateFlags,'0'...'9') => { - flags.width = cur as usize - '0' as usize; - *fstate = FormatStateWidth; - } - (FormatStateFlags,'.') => { - *fstate = FormatStatePrecision; - } - (FormatStateWidth,'0'...'9') => { - let old = flags.width; - flags.width = flags.width * 10 + (cur as usize - '0' as usize); - if flags.width < old { return Err("format width overflow".to_string()) } - } - (FormatStateWidth,'.') => { - *fstate = FormatStatePrecision; - } - (FormatStatePrecision,'0'...'9') => { - let old = flags.precision; - flags.precision = flags.precision * 10 + (cur as usize - '0' as usize); - if flags.precision < old { - return Err("format precision overflow".to_string()) - } - } - _ => return Err("invalid format specifier".to_string()) - } - } - SeekIfElse(level) => { - if cur == '%' { - state = SeekIfElsePercent(level); - } - old_state = Nothing; - } - SeekIfElsePercent(level) => { - if cur == ';' { - if level == 0 { - state = Nothing; - } else { - state = SeekIfElse(level-1); - } - } else if cur == 'e' && level == 0 { - state = Nothing; - } 
else if cur == '?' { - state = SeekIfElse(level+1); - } else { - state = SeekIfElse(level); - } - } - SeekIfEnd(level) => { - if cur == '%' { - state = SeekIfEndPercent(level); - } - old_state = Nothing; - } - SeekIfEndPercent(level) => { - if cur == ';' { - if level == 0 { - state = Nothing; - } else { - state = SeekIfEnd(level-1); - } - } else if cur == '?' { - state = SeekIfEnd(level+1); - } else { - state = SeekIfEnd(level); - } - } - } - if state == old_state { - state = Nothing; - } - } - Ok(output) -} - -#[derive(Copy, Clone, PartialEq)] -struct Flags { - width: usize, - precision: usize, - alternate: bool, - left: bool, - sign: bool, - space: bool -} - -impl Flags { - fn new() -> Flags { - Flags{ width: 0, precision: 0, alternate: false, - left: false, sign: false, space: false } - } -} - -#[derive(Copy, Clone)] -enum FormatOp { - FormatDigit, - FormatOctal, - FormatHex, - FormatHEX, - FormatString -} - -impl FormatOp { - fn from_char(c: char) -> FormatOp { - match c { - 'd' => FormatDigit, - 'o' => FormatOctal, - 'x' => FormatHex, - 'X' => FormatHEX, - 's' => FormatString, - _ => panic!("bad FormatOp char") - } - } - fn to_char(self) -> char { - match self { - FormatDigit => 'd', - FormatOctal => 'o', - FormatHex => 'x', - FormatHEX => 'X', - FormatString => 's' - } - } -} - -fn format(val: Param, op: FormatOp, flags: Flags) -> Result ,String> { - let mut s = match val { - Number(d) => { - let s = match (op, flags.sign) { - (FormatDigit, true) => format!("{:+}", d).into_bytes(), - (FormatDigit, false) => format!("{}", d).into_bytes(), - (FormatOctal, _) => format!("{:o}", d).into_bytes(), - (FormatHex, _) => format!("{:x}", d).into_bytes(), - (FormatHEX, _) => format!("{:X}", d).into_bytes(), - (FormatString, _) => { - return Err("non-number on stack with %s".to_string()) - } - }; - let mut s: Vec = s.into_iter().collect(); - if flags.precision > s.len() { - let mut s_ = Vec::with_capacity(flags.precision); - let n = flags.precision - s.len(); - s_.extend(repeat(b'0').take(n)); - s_.extend(s.into_iter()); - s = s_; - } - assert!(!s.is_empty(), "string conversion produced empty result"); - match op { - FormatDigit => { - if flags.space && !(s[0] == b'-' || s[0] == b'+' ) { - s.insert(0, b' '); - } - } - FormatOctal => { - if flags.alternate && s[0] != b'0' { - s.insert(0, b'0'); - } - } - FormatHex => { - if flags.alternate { - let s_ = replace(&mut s, vec!(b'0', b'x')); - s.extend(s_.into_iter()); - } - } - FormatHEX => { - s = s.into_ascii_uppercase(); - if flags.alternate { - let s_ = replace(&mut s, vec!(b'0', b'X')); - s.extend(s_.into_iter()); - } - } - FormatString => unreachable!() - } - s - } - Words(s) => { - match op { - FormatString => { - let mut s = s.as_bytes().to_vec(); - if flags.precision > 0 && flags.precision < s.len() { - s.truncate(flags.precision); - } - s - } - _ => { - return Err(format!("non-string on stack with %{:?}", - op.to_char())) - } - } - } - }; - if flags.width > s.len() { - let n = flags.width - s.len(); - if flags.left { - s.extend(repeat(b' ').take(n)); - } else { - let mut s_ = Vec::with_capacity(flags.width); - s_.extend(repeat(b' ').take(n)); - s_.extend(s.into_iter()); - s = s_; - } - } - Ok(s) -} - -#[cfg(test)] -mod tests { - use super::{expand,Param,Words,Variables,Number}; - use std::result::Result::Ok; - - #[test] - fn test_basic_setabf() { - let s = b"\\E[48;5;%p1%dm"; - assert_eq!(expand(s, &[Number(1)], &mut Variables::new()).unwrap(), - "\\E[48;5;1m".bytes().collect::>()); - } - - #[test] - fn test_multiple_int_constants() { - 
assert_eq!(expand(b"%{1}%{2}%d%d", &[], &mut Variables::new()).unwrap(), - "21".bytes().collect::>()); - } - - #[test] - fn test_op_i() { - let mut vars = Variables::new(); - assert_eq!(expand(b"%p1%d%p2%d%p3%d%i%p1%d%p2%d%p3%d", - &[Number(1),Number(2),Number(3)], &mut vars), - Ok("123233".bytes().collect::>())); - assert_eq!(expand(b"%p1%d%p2%d%i%p1%d%p2%d", &[], &mut vars), - Ok("0011".bytes().collect::>())); - } - - #[test] - fn test_param_stack_failure_conditions() { - let mut varstruct = Variables::new(); - let vars = &mut varstruct; - fn get_res(fmt: &str, cap: &str, params: &[Param], vars: &mut Variables) -> - Result, String> - { - let mut u8v: Vec<_> = fmt.bytes().collect(); - u8v.extend(cap.bytes()); - expand(&u8v, params, vars) - } - - let caps = ["%d", "%c", "%s", "%Pa", "%l", "%!", "%~"]; - for &cap in &caps { - let res = get_res("", cap, &[], vars); - assert!(res.is_err(), - "Op {} succeeded incorrectly with 0 stack entries", cap); - let p = if cap == "%s" || cap == "%l" { - Words("foo".to_string()) - } else { - Number(97) - }; - let res = get_res("%p1", cap, &[p], vars); - assert!(res.is_ok(), - "Op {} failed with 1 stack entry: {}", cap, res.err().unwrap()); - } - let caps = ["%+", "%-", "%*", "%/", "%m", "%&", "%|", "%A", "%O"]; - for &cap in &caps { - let res = expand(cap.as_bytes(), &[], vars); - assert!(res.is_err(), - "Binop {} succeeded incorrectly with 0 stack entries", cap); - let res = get_res("%{1}", cap, &[], vars); - assert!(res.is_err(), - "Binop {} succeeded incorrectly with 1 stack entry", cap); - let res = get_res("%{1}%{2}", cap, &[], vars); - assert!(res.is_ok(), - "Binop {} failed with 2 stack entries: {:?}", cap, res.err().unwrap()); - } - } - - #[test] - fn test_push_bad_param() { - assert!(expand(b"%pa", &[], &mut Variables::new()).is_err()); - } - - #[test] - fn test_comparison_ops() { - let v = [('<', [1, 0, 0]), ('=', [0, 1, 0]), ('>', [0, 0, 1])]; - for &(op, bs) in &v { - let s = format!("%{{1}}%{{2}}%{}%d", op); - let res = expand(s.as_bytes(), &[], &mut Variables::new()); - assert!(res.is_ok(), res.err().unwrap()); - assert_eq!(res.unwrap(), [b'0' + bs[0]]); - let s = format!("%{{1}}%{{1}}%{}%d", op); - let res = expand(s.as_bytes(), &[], &mut Variables::new()); - assert!(res.is_ok(), res.err().unwrap()); - assert_eq!(res.unwrap(), [b'0' + bs[1]]); - let s = format!("%{{2}}%{{1}}%{}%d", op); - let res = expand(s.as_bytes(), &[], &mut Variables::new()); - assert!(res.is_ok(), res.err().unwrap()); - assert_eq!(res.unwrap(), [b'0' + bs[2]]); - } - } - - #[test] - fn test_conditionals() { - let mut vars = Variables::new(); - let s = b"\\E[%?%p1%{8}%<%t3%p1%d%e%p1%{16}%<%t9%p1%{8}%-%d%e38;5;%p1%d%;m"; - let res = expand(s, &[Number(1)], &mut vars); - assert!(res.is_ok(), res.err().unwrap()); - assert_eq!(res.unwrap(), - "\\E[31m".bytes().collect::>()); - let res = expand(s, &[Number(8)], &mut vars); - assert!(res.is_ok(), res.err().unwrap()); - assert_eq!(res.unwrap(), - "\\E[90m".bytes().collect::>()); - let res = expand(s, &[Number(42)], &mut vars); - assert!(res.is_ok(), res.err().unwrap()); - assert_eq!(res.unwrap(), - "\\E[38;5;42m".bytes().collect::>()); - } - - #[test] - fn test_format() { - let mut varstruct = Variables::new(); - let vars = &mut varstruct; - assert_eq!(expand(b"%p1%s%p2%2s%p3%2s%p4%.2s", - &[Words("foo".to_string()), - Words("foo".to_string()), - Words("f".to_string()), - Words("foo".to_string())], vars), - Ok("foofoo ffo".bytes().collect::>())); - assert_eq!(expand(b"%p1%:-4.2s", &[Words("foo".to_string())], vars), - Ok("fo 
".bytes().collect::>())); - - assert_eq!(expand(b"%p1%d%p1%.3d%p1%5d%p1%:+d", &[Number(1)], vars), - Ok("1001 1+1".bytes().collect::>())); - assert_eq!(expand(b"%p1%o%p1%#o%p2%6.4x%p2%#6.4X", &[Number(15), Number(27)], vars), - Ok("17017 001b0X001B".bytes().collect::>())); - } -} diff --git a/src/libterm/terminfo/parser/compiled.rs b/src/libterm/terminfo/parser/compiled.rs deleted file mode 100644 index ef42d8c2506b3..0000000000000 --- a/src/libterm/terminfo/parser/compiled.rs +++ /dev/null @@ -1,358 +0,0 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -#![allow(non_upper_case_globals)] - -//! ncurses-compatible compiled terminfo format parsing (term(5)) - -use std::collections::HashMap; -use std::io::prelude::*; -use std::io; -use super::super::TermInfo; - -// These are the orders ncurses uses in its compiled format (as of 5.9). Not sure if portable. - -pub static boolfnames: &'static[&'static str] = &["auto_left_margin", "auto_right_margin", - "no_esc_ctlc", "ceol_standout_glitch", "eat_newline_glitch", "erase_overstrike", "generic_type", - "hard_copy", "has_meta_key", "has_status_line", "insert_null_glitch", "memory_above", - "memory_below", "move_insert_mode", "move_standout_mode", "over_strike", "status_line_esc_ok", - "dest_tabs_magic_smso", "tilde_glitch", "transparent_underline", "xon_xoff", "needs_xon_xoff", - "prtr_silent", "hard_cursor", "non_rev_rmcup", "no_pad_char", "non_dest_scroll_region", - "can_change", "back_color_erase", "hue_lightness_saturation", "col_addr_glitch", - "cr_cancels_micro_mode", "has_print_wheel", "row_addr_glitch", "semi_auto_right_margin", - "cpi_changes_res", "lpi_changes_res", "backspaces_with_bs", "crt_no_scrolling", - "no_correctly_working_cr", "gnu_has_meta_key", "linefeed_is_newline", "has_hardware_tabs", - "return_does_clr_eol"]; - -pub static boolnames: &'static[&'static str] = &["bw", "am", "xsb", "xhp", "xenl", "eo", - "gn", "hc", "km", "hs", "in", "db", "da", "mir", "msgr", "os", "eslok", "xt", "hz", "ul", "xon", - "nxon", "mc5i", "chts", "nrrmc", "npc", "ndscr", "ccc", "bce", "hls", "xhpa", "crxm", "daisy", - "xvpa", "sam", "cpix", "lpix", "OTbs", "OTns", "OTnc", "OTMT", "OTNL", "OTpt", "OTxr"]; - -pub static numfnames: &'static[&'static str] = &[ "columns", "init_tabs", "lines", - "lines_of_memory", "magic_cookie_glitch", "padding_baud_rate", "virtual_terminal", - "width_status_line", "num_labels", "label_height", "label_width", "max_attributes", - "maximum_windows", "max_colors", "max_pairs", "no_color_video", "buffer_capacity", - "dot_vert_spacing", "dot_horz_spacing", "max_micro_address", "max_micro_jump", "micro_col_size", - "micro_line_size", "number_of_pins", "output_res_char", "output_res_line", - "output_res_horz_inch", "output_res_vert_inch", "print_rate", "wide_char_size", "buttons", - "bit_image_entwining", "bit_image_type", "magic_cookie_glitch_ul", "carriage_return_delay", - "new_line_delay", "backspace_delay", "horizontal_tab_delay", "number_of_function_keys"]; - -pub static numnames: &'static[&'static str] = &[ "cols", "it", "lines", "lm", "xmc", "pb", - "vt", "wsl", "nlab", "lh", "lw", "ma", "wnum", "colors", "pairs", "ncv", "bufsz", "spinv", - "spinh", "maddr", "mjump", "mcs", "mls", "npins", "orc", "orl", "orhi", 
"orvi", "cps", "widcs", - "btns", "bitwin", "bitype", "UTug", "OTdC", "OTdN", "OTdB", "OTdT", "OTkn"]; - -pub static stringfnames: &'static[&'static str] = &[ "back_tab", "bell", "carriage_return", - "change_scroll_region", "clear_all_tabs", "clear_screen", "clr_eol", "clr_eos", - "column_address", "command_character", "cursor_address", "cursor_down", "cursor_home", - "cursor_invisible", "cursor_left", "cursor_mem_address", "cursor_normal", "cursor_right", - "cursor_to_ll", "cursor_up", "cursor_visible", "delete_character", "delete_line", - "dis_status_line", "down_half_line", "enter_alt_charset_mode", "enter_blink_mode", - "enter_bold_mode", "enter_ca_mode", "enter_delete_mode", "enter_dim_mode", "enter_insert_mode", - "enter_secure_mode", "enter_protected_mode", "enter_reverse_mode", "enter_standout_mode", - "enter_underline_mode", "erase_chars", "exit_alt_charset_mode", "exit_attribute_mode", - "exit_ca_mode", "exit_delete_mode", "exit_insert_mode", "exit_standout_mode", - "exit_underline_mode", "flash_screen", "form_feed", "from_status_line", "init_1string", - "init_2string", "init_3string", "init_file", "insert_character", "insert_line", - "insert_padding", "key_backspace", "key_catab", "key_clear", "key_ctab", "key_dc", "key_dl", - "key_down", "key_eic", "key_eol", "key_eos", "key_f0", "key_f1", "key_f10", "key_f2", "key_f3", - "key_f4", "key_f5", "key_f6", "key_f7", "key_f8", "key_f9", "key_home", "key_ic", "key_il", - "key_left", "key_ll", "key_npage", "key_ppage", "key_right", "key_sf", "key_sr", "key_stab", - "key_up", "keypad_local", "keypad_xmit", "lab_f0", "lab_f1", "lab_f10", "lab_f2", "lab_f3", - "lab_f4", "lab_f5", "lab_f6", "lab_f7", "lab_f8", "lab_f9", "meta_off", "meta_on", "newline", - "pad_char", "parm_dch", "parm_delete_line", "parm_down_cursor", "parm_ich", "parm_index", - "parm_insert_line", "parm_left_cursor", "parm_right_cursor", "parm_rindex", "parm_up_cursor", - "pkey_key", "pkey_local", "pkey_xmit", "print_screen", "prtr_off", "prtr_on", "repeat_char", - "reset_1string", "reset_2string", "reset_3string", "reset_file", "restore_cursor", - "row_address", "save_cursor", "scroll_forward", "scroll_reverse", "set_attributes", "set_tab", - "set_window", "tab", "to_status_line", "underline_char", "up_half_line", "init_prog", "key_a1", - "key_a3", "key_b2", "key_c1", "key_c3", "prtr_non", "char_padding", "acs_chars", "plab_norm", - "key_btab", "enter_xon_mode", "exit_xon_mode", "enter_am_mode", "exit_am_mode", "xon_character", - "xoff_character", "ena_acs", "label_on", "label_off", "key_beg", "key_cancel", "key_close", - "key_command", "key_copy", "key_create", "key_end", "key_enter", "key_exit", "key_find", - "key_help", "key_mark", "key_message", "key_move", "key_next", "key_open", "key_options", - "key_previous", "key_print", "key_redo", "key_reference", "key_refresh", "key_replace", - "key_restart", "key_resume", "key_save", "key_suspend", "key_undo", "key_sbeg", "key_scancel", - "key_scommand", "key_scopy", "key_screate", "key_sdc", "key_sdl", "key_select", "key_send", - "key_seol", "key_sexit", "key_sfind", "key_shelp", "key_shome", "key_sic", "key_sleft", - "key_smessage", "key_smove", "key_snext", "key_soptions", "key_sprevious", "key_sprint", - "key_sredo", "key_sreplace", "key_sright", "key_srsume", "key_ssave", "key_ssuspend", - "key_sundo", "req_for_input", "key_f11", "key_f12", "key_f13", "key_f14", "key_f15", "key_f16", - "key_f17", "key_f18", "key_f19", "key_f20", "key_f21", "key_f22", "key_f23", "key_f24", - "key_f25", "key_f26", "key_f27", "key_f28", 
"key_f29", "key_f30", "key_f31", "key_f32", - "key_f33", "key_f34", "key_f35", "key_f36", "key_f37", "key_f38", "key_f39", "key_f40", - "key_f41", "key_f42", "key_f43", "key_f44", "key_f45", "key_f46", "key_f47", "key_f48", - "key_f49", "key_f50", "key_f51", "key_f52", "key_f53", "key_f54", "key_f55", "key_f56", - "key_f57", "key_f58", "key_f59", "key_f60", "key_f61", "key_f62", "key_f63", "clr_bol", - "clear_margins", "set_left_margin", "set_right_margin", "label_format", "set_clock", - "display_clock", "remove_clock", "create_window", "goto_window", "hangup", "dial_phone", - "quick_dial", "tone", "pulse", "flash_hook", "fixed_pause", "wait_tone", "user0", "user1", - "user2", "user3", "user4", "user5", "user6", "user7", "user8", "user9", "orig_pair", - "orig_colors", "initialize_color", "initialize_pair", "set_color_pair", "set_foreground", - "set_background", "change_char_pitch", "change_line_pitch", "change_res_horz", - "change_res_vert", "define_char", "enter_doublewide_mode", "enter_draft_quality", - "enter_italics_mode", "enter_leftward_mode", "enter_micro_mode", "enter_near_letter_quality", - "enter_normal_quality", "enter_shadow_mode", "enter_subscript_mode", "enter_superscript_mode", - "enter_upward_mode", "exit_doublewide_mode", "exit_italics_mode", "exit_leftward_mode", - "exit_micro_mode", "exit_shadow_mode", "exit_subscript_mode", "exit_superscript_mode", - "exit_upward_mode", "micro_column_address", "micro_down", "micro_left", "micro_right", - "micro_row_address", "micro_up", "order_of_pins", "parm_down_micro", "parm_left_micro", - "parm_right_micro", "parm_up_micro", "select_char_set", "set_bottom_margin", - "set_bottom_margin_parm", "set_left_margin_parm", "set_right_margin_parm", "set_top_margin", - "set_top_margin_parm", "start_bit_image", "start_char_set_def", "stop_bit_image", - "stop_char_set_def", "subscript_characters", "superscript_characters", "these_cause_cr", - "zero_motion", "char_set_names", "key_mouse", "mouse_info", "req_mouse_pos", "get_mouse", - "set_a_foreground", "set_a_background", "pkey_plab", "device_type", "code_set_init", - "set0_des_seq", "set1_des_seq", "set2_des_seq", "set3_des_seq", "set_lr_margin", - "set_tb_margin", "bit_image_repeat", "bit_image_newline", "bit_image_carriage_return", - "color_names", "define_bit_image_region", "end_bit_image_region", "set_color_band", - "set_page_length", "display_pc_char", "enter_pc_charset_mode", "exit_pc_charset_mode", - "enter_scancode_mode", "exit_scancode_mode", "pc_term_options", "scancode_escape", - "alt_scancode_esc", "enter_horizontal_hl_mode", "enter_left_hl_mode", "enter_low_hl_mode", - "enter_right_hl_mode", "enter_top_hl_mode", "enter_vertical_hl_mode", "set_a_attributes", - "set_pglen_inch", "termcap_init2", "termcap_reset", "linefeed_if_not_lf", "backspace_if_not_bs", - "other_non_function_keys", "arrow_key_map", "acs_ulcorner", "acs_llcorner", "acs_urcorner", - "acs_lrcorner", "acs_ltee", "acs_rtee", "acs_btee", "acs_ttee", "acs_hline", "acs_vline", - "acs_plus", "memory_lock", "memory_unlock", "box_chars_1"]; - -pub static stringnames: &'static[&'static str] = &[ "cbt", "_", "cr", "csr", "tbc", "clear", - "_", "_", "hpa", "cmdch", "cup", "cud1", "home", "civis", "cub1", "mrcup", "cnorm", "cuf1", - "ll", "cuu1", "cvvis", "dch1", "dl1", "dsl", "hd", "smacs", "blink", "bold", "smcup", "smdc", - "dim", "smir", "invis", "prot", "rev", "smso", "smul", "ech", "rmacs", "sgr0", "rmcup", "rmdc", - "rmir", "rmso", "rmul", "flash", "ff", "fsl", "is1", "is2", "is3", "if", "ich1", "il1", "ip", - "kbs", 
"ktbc", "kclr", "kctab", "_", "_", "kcud1", "_", "_", "_", "_", "_", "_", "_", "_", "_", - "_", "_", "_", "_", "_", "khome", "_", "_", "kcub1", "_", "knp", "kpp", "kcuf1", "_", "_", - "khts", "_", "rmkx", "smkx", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "rmm", "_", - "_", "pad", "dch", "dl", "cud", "ich", "indn", "il", "cub", "cuf", "rin", "cuu", "pfkey", - "pfloc", "pfx", "mc0", "mc4", "_", "rep", "rs1", "rs2", "rs3", "rf", "rc", "vpa", "sc", "ind", - "ri", "sgr", "_", "wind", "_", "tsl", "uc", "hu", "iprog", "_", "_", "_", "_", "_", "mc5p", - "rmp", "acsc", "pln", "kcbt", "smxon", "rmxon", "smam", "rmam", "xonc", "xoffc", "_", "smln", - "rmln", "_", "kcan", "kclo", "kcmd", "kcpy", "kcrt", "_", "kent", "kext", "kfnd", "khlp", - "kmrk", "kmsg", "kmov", "knxt", "kopn", "kopt", "kprv", "kprt", "krdo", "kref", "krfr", "krpl", - "krst", "kres", "ksav", "kspd", "kund", "kBEG", "kCAN", "kCMD", "kCPY", "kCRT", "_", "_", - "kslt", "kEND", "kEOL", "kEXT", "kFND", "kHLP", "kHOM", "_", "kLFT", "kMSG", "kMOV", "kNXT", - "kOPT", "kPRV", "kPRT", "kRDO", "kRPL", "kRIT", "kRES", "kSAV", "kSPD", "kUND", "rfi", "_", "_", - "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", - "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", - "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", - "dclk", "rmclk", "cwin", "wingo", "_", "dial", "qdial", "_", "_", "hook", "pause", "wait", "_", - "_", "_", "_", "_", "_", "_", "_", "_", "_", "op", "oc", "initc", "initp", "scp", "setf", - "setb", "cpi", "lpi", "chr", "cvr", "defc", "swidm", "sdrfq", "sitm", "slm", "smicm", "snlq", - "snrmq", "sshm", "ssubm", "ssupm", "sum", "rwidm", "ritm", "rlm", "rmicm", "rshm", "rsubm", - "rsupm", "rum", "mhpa", "mcud1", "mcub1", "mcuf1", "mvpa", "mcuu1", "porder", "mcud", "mcub", - "mcuf", "mcuu", "scs", "smgb", "smgbp", "smglp", "smgrp", "smgt", "smgtp", "sbim", "scsd", - "rbim", "rcsd", "subcs", "supcs", "docr", "zerom", "csnm", "kmous", "minfo", "reqmp", "getm", - "setaf", "setab", "pfxl", "devt", "csin", "s0ds", "s1ds", "s2ds", "s3ds", "smglr", "smgtb", - "birep", "binel", "bicr", "colornm", "defbi", "endbi", "setcolor", "slines", "dispc", "smpch", - "rmpch", "smsc", "rmsc", "pctrm", "scesc", "scesa", "ehhlm", "elhlm", "elohlm", "erhlm", - "ethlm", "evhlm", "sgr1", "slength", "OTi2", "OTrs", "OTnl", "OTbs", "OTko", "OTma", "OTG2", - "OTG3", "OTG1", "OTG4", "OTGR", "OTGL", "OTGU", "OTGD", "OTGH", "OTGV", "OTGC", "meml", "memu", - "box1"]; - -/// Parse a compiled terminfo entry, using long capability names if `longnames` is true -pub fn parse(file: &mut Read, longnames: bool) - -> Result, String> { - macro_rules! 
try { ($e:expr) => ( - match $e { - Ok(e) => e, - Err(e) => return Err(format!("{:?}", e)) - } - ) } - - let bnames; - let snames; - let nnames; - - if longnames { - bnames = boolfnames; - snames = stringfnames; - nnames = numfnames; - } else { - bnames = boolnames; - snames = stringnames; - nnames = numnames; - } - - // Check magic number - let magic = try!(read_le_u16(file)); - if magic != 0x011A { - return Err(format!("invalid magic number: expected {:x}, found {:x}", - 0x011A_usize, magic as usize)); - } - - let names_bytes = try!(read_le_u16(file)) as isize; - let bools_bytes = try!(read_le_u16(file)) as isize; - let numbers_count = try!(read_le_u16(file)) as isize; - let string_offsets_count = try!(read_le_u16(file)) as isize; - let string_table_bytes = try!(read_le_u16(file)) as isize; - - assert!(names_bytes > 0); - - if (bools_bytes as usize) > boolnames.len() { - return Err("incompatible file: more booleans than \ - expected".to_string()); - } - - if (numbers_count as usize) > numnames.len() { - return Err("incompatible file: more numbers than \ - expected".to_string()); - } - - if (string_offsets_count as usize) > stringnames.len() { - return Err("incompatible file: more string offsets than \ - expected".to_string()); - } - - // don't read NUL - let bytes = try!(read_exact(file, names_bytes as usize - 1)); - let names_str = match String::from_utf8(bytes) { - Ok(s) => s, - Err(_) => return Err("input not utf-8".to_string()), - }; - - let term_names: Vec = names_str.split('|') - .map(|s| s.to_string()) - .collect(); - - try!(read_byte(file)); // consume NUL - - let mut bools_map = HashMap::new(); - if bools_bytes != 0 { - for i in 0..bools_bytes { - let b = try!(read_byte(file)); - if b == 1 { - bools_map.insert(bnames[i as usize].to_string(), true); - } - } - } - - if (bools_bytes + names_bytes) % 2 == 1 { - try!(read_byte(file)); // compensate for padding - } - - let mut numbers_map = HashMap::new(); - if numbers_count != 0 { - for i in 0..numbers_count { - let n = try!(read_le_u16(file)); - if n != 0xFFFF { - numbers_map.insert(nnames[i as usize].to_string(), n); - } - } - } - - let mut string_map = HashMap::new(); - - if string_offsets_count != 0 { - let mut string_offsets = Vec::with_capacity(10); - for _ in 0..string_offsets_count { - string_offsets.push(try!(read_le_u16(file))); - } - - let string_table = try!(read_exact(file, string_table_bytes as usize)); - - if string_table.len() != string_table_bytes as usize { - return Err("error: hit EOF before end of string \ - table".to_string()); - } - - for (i, v) in string_offsets.iter().enumerate() { - let offset = *v; - if offset == 0xFFFF { // non-entry - continue; - } - - let name = if snames[i] == "_" { - stringfnames[i] - } else { - snames[i] - }; - - if offset == 0xFFFE { - // undocumented: FFFE indicates cap@, which means the capability is not present - // unsure if the handling for this is correct - string_map.insert(name.to_string(), Vec::new()); - continue; - } - - - // Find the offset of the NUL we want to go to - let nulpos = string_table[offset as usize .. string_table_bytes as usize] - .iter().position(|&b| b == 0); - match nulpos { - Some(len) => { - string_map.insert(name.to_string(), - string_table[offset as usize .. 
- (offset as usize + len)].to_vec()) - }, - None => { - return Err("invalid file: missing NUL in \ - string_table".to_string()); - } - }; - } - } - - // And that's all there is to it - Ok(box TermInfo { - names: term_names, - bools: bools_map, - numbers: numbers_map, - strings: string_map - }) -} - -fn read_le_u16(r: &mut R) -> io::Result { - let mut b = [0; 2]; - assert_eq!(try!(r.read(&mut b)), 2); - Ok((b[0] as u16) | ((b[1] as u16) << 8)) -} - -fn read_byte(r: &mut R) -> io::Result { - let mut b = [0; 1]; - assert_eq!(try!(r.read(&mut b)), 1); - Ok(b[0]) -} - -fn read_exact(r: &mut R, sz: usize) -> io::Result> { - let mut v = Vec::with_capacity(sz); - try!(r.take(sz as u64).read_to_end(&mut v)); - assert_eq!(v.len(), sz); - Ok(v) -} - -/// Create a dummy TermInfo struct for msys terminals -pub fn msys_terminfo() -> Box { - let mut strings = HashMap::new(); - strings.insert("sgr0".to_string(), b"\x1B[0m".to_vec()); - strings.insert("bold".to_string(), b"\x1B[1m".to_vec()); - strings.insert("setaf".to_string(), b"\x1B[3%p1%dm".to_vec()); - strings.insert("setab".to_string(), b"\x1B[4%p1%dm".to_vec()); - box TermInfo { - names: vec!("cygwin".to_string()), // msys is a fork of an older cygwin version - bools: HashMap::new(), - numbers: HashMap::new(), - strings: strings - } -} - -#[cfg(test)] -mod tests { - - use super::{boolnames, boolfnames, numnames, numfnames, stringnames, stringfnames}; - - #[test] - fn test_veclens() { - assert_eq!(boolfnames.len(), boolnames.len()); - assert_eq!(numfnames.len(), numnames.len()); - assert_eq!(stringfnames.len(), stringnames.len()); - } -} diff --git a/src/libterm/terminfo/searcher.rs b/src/libterm/terminfo/searcher.rs deleted file mode 100644 index 16062060df087..0000000000000 --- a/src/libterm/terminfo/searcher.rs +++ /dev/null @@ -1,119 +0,0 @@ -// Copyright 2012 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! ncurses-compatible database discovery -//! -//! Does not support hashed database, only filesystem! - -use std::env; -use std::fs::File; -use std::io::prelude::*; -use std::path::PathBuf; - -/// Return path to database entry for `term` -#[allow(deprecated)] -pub fn get_dbpath_for_term(term: &str) -> Option> { - if term.is_empty() { - return None; - } - - let homedir = env::home_dir(); - - let mut dirs_to_search = Vec::new(); - let first_char = term.char_at(0); - - // Find search directory - match env::var_os("TERMINFO") { - Some(dir) => dirs_to_search.push(PathBuf::from(dir)), - None => { - if homedir.is_some() { - // ncurses compatibility; - dirs_to_search.push(homedir.unwrap().join(".terminfo")) - } - match env::var("TERMINFO_DIRS") { - Ok(dirs) => for i in dirs.split(':') { - if i == "" { - dirs_to_search.push(PathBuf::from("/usr/share/terminfo")); - } else { - dirs_to_search.push(PathBuf::from(i)); - } - }, - // Found nothing in TERMINFO_DIRS, use the default paths: - // According to /etc/terminfo/README, after looking at - // ~/.terminfo, ncurses will search /etc/terminfo, then - // /lib/terminfo, and eventually /usr/share/terminfo. - Err(..) 
=> { - dirs_to_search.push(PathBuf::from("/etc/terminfo")); - dirs_to_search.push(PathBuf::from("/lib/terminfo")); - dirs_to_search.push(PathBuf::from("/usr/share/terminfo")); - } - } - } - }; - - // Look for the terminal in all of the search directories - for p in &dirs_to_search { - if p.exists() { - let f = first_char.to_string(); - let newp = p.join(&f).join(term); - if newp.exists() { - return Some(box newp); - } - // on some installations the dir is named after the hex of the char (e.g. OS X) - let f = format!("{:x}", first_char as usize); - let newp = p.join(&f).join(term); - if newp.exists() { - return Some(box newp); - } - } - } - None -} - -/// Return open file for `term` -pub fn open(term: &str) -> Result { - match get_dbpath_for_term(term) { - Some(x) => { - match File::open(&*x) { - Ok(file) => Ok(file), - Err(e) => Err(format!("error opening file: {:?}", e)), - } - } - None => { - Err(format!("could not find terminfo entry for {:?}", term)) - } - } -} - -#[test] -#[ignore(reason = "buildbots don't have ncurses installed and I can't mock everything I need")] -fn test_get_dbpath_for_term() { - // woefully inadequate test coverage - // note: current tests won't work with non-standard terminfo hierarchies (e.g. OS X's) - use std::env; - // FIXME (#9639): This needs to handle non-utf8 paths - fn x(t: &str) -> String { - let p = get_dbpath_for_term(t).expect("no terminfo entry found"); - p.to_str().unwrap().to_string() - }; - assert!(x("screen") == "/usr/share/terminfo/s/screen"); - assert!(get_dbpath_for_term("") == None); - env::set_var("TERMINFO_DIRS", ":"); - assert!(x("screen") == "/usr/share/terminfo/s/screen"); - env::remove_var("TERMINFO_DIRS"); -} - -#[test] -#[ignore(reason = "see test_get_dbpath_for_term")] -fn test_open() { - open("screen").unwrap(); - let t = open("nonexistent terminal that hopefully does not exist"); - assert!(t.is_err()); -} diff --git a/src/libterm/win.rs b/src/libterm/win.rs deleted file mode 100644 index 66ef5e8661797..0000000000000 --- a/src/libterm/win.rs +++ /dev/null @@ -1,199 +0,0 @@ -// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! Windows console handling - -// FIXME (#13400): this is only a tiny fraction of the Windows console api - -extern crate libc; - -use std::io; -use std::io::prelude::*; - -use attr; -use color; -use {Terminal,UnwrappableTerminal}; - -/// A Terminal implementation which uses the Win32 Console API. 
-pub struct WinConsole { - buf: T, - def_foreground: color::Color, - def_background: color::Color, - foreground: color::Color, - background: color::Color, -} - -#[allow(non_snake_case)] -#[repr(C)] -struct CONSOLE_SCREEN_BUFFER_INFO { - dwSize: [libc::c_short; 2], - dwCursorPosition: [libc::c_short; 2], - wAttributes: libc::WORD, - srWindow: [libc::c_short; 4], - dwMaximumWindowSize: [libc::c_short; 2], -} - -#[allow(non_snake_case)] -#[link(name = "kernel32")] -extern "system" { - fn SetConsoleTextAttribute(handle: libc::HANDLE, attr: libc::WORD) -> libc::BOOL; - fn GetStdHandle(which: libc::DWORD) -> libc::HANDLE; - fn GetConsoleScreenBufferInfo(handle: libc::HANDLE, - info: *mut CONSOLE_SCREEN_BUFFER_INFO) -> libc::BOOL; -} - -fn color_to_bits(color: color::Color) -> u16 { - // magic numbers from mingw-w64's wincon.h - - let bits = match color % 8 { - color::BLACK => 0, - color::BLUE => 0x1, - color::GREEN => 0x2, - color::RED => 0x4, - color::YELLOW => 0x2 | 0x4, - color::MAGENTA => 0x1 | 0x4, - color::CYAN => 0x1 | 0x2, - color::WHITE => 0x1 | 0x2 | 0x4, - _ => unreachable!() - }; - - if color >= 8 { - bits | 0x8 - } else { - bits - } -} - -fn bits_to_color(bits: u16) -> color::Color { - let color = match bits & 0x7 { - 0 => color::BLACK, - 0x1 => color::BLUE, - 0x2 => color::GREEN, - 0x4 => color::RED, - 0x6 => color::YELLOW, - 0x5 => color::MAGENTA, - 0x3 => color::CYAN, - 0x7 => color::WHITE, - _ => unreachable!() - }; - - color | (bits & 0x8) // copy the hi-intensity bit -} - -impl WinConsole { - fn apply(&mut self) { - let _unused = self.buf.flush(); - let mut accum: libc::WORD = 0; - accum |= color_to_bits(self.foreground); - accum |= color_to_bits(self.background) << 4; - - unsafe { - // Magic -11 means stdout, from - // http://msdn.microsoft.com/en-us/library/windows/desktop/ms683231%28v=vs.85%29.aspx - // - // You may be wondering, "but what about stderr?", and the answer - // to that is that setting terminal attributes on the stdout - // handle also sets them for stderr, since they go to the same - // terminal! Admittedly, this is fragile, since stderr could be - // redirected to a different console. This is good enough for - // rustc though. See #13400. - let out = GetStdHandle(-11i32 as libc::DWORD); - SetConsoleTextAttribute(out, accum); - } - } - - /// Returns `None` whenever the terminal cannot be created for some - /// reason. 
- pub fn new(out: T) -> Option+Send+'static>> { - let fg; - let bg; - unsafe { - let mut buffer_info = ::std::mem::uninitialized(); - if GetConsoleScreenBufferInfo(GetStdHandle(-11i32 as libc::DWORD), - &mut buffer_info) != 0 { - fg = bits_to_color(buffer_info.wAttributes); - bg = bits_to_color(buffer_info.wAttributes >> 4); - } else { - fg = color::WHITE; - bg = color::BLACK; - } - } - Some(box WinConsole { buf: out, - def_foreground: fg, def_background: bg, - foreground: fg, background: bg }) - } -} - -impl Write for WinConsole { - fn write(&mut self, buf: &[u8]) -> io::Result { - self.buf.write(buf) - } - - fn flush(&mut self) -> io::Result<()> { - self.buf.flush() - } -} - -impl Terminal for WinConsole { - fn fg(&mut self, color: color::Color) -> io::Result { - self.foreground = color; - self.apply(); - - Ok(true) - } - - fn bg(&mut self, color: color::Color) -> io::Result { - self.background = color; - self.apply(); - - Ok(true) - } - - fn attr(&mut self, attr: attr::Attr) -> io::Result { - match attr { - attr::ForegroundColor(f) => { - self.foreground = f; - self.apply(); - Ok(true) - }, - attr::BackgroundColor(b) => { - self.background = b; - self.apply(); - Ok(true) - }, - _ => Ok(false) - } - } - - fn supports_attr(&self, attr: attr::Attr) -> bool { - // it claims support for underscore and reverse video, but I can't get - // it to do anything -cmr - match attr { - attr::ForegroundColor(_) | attr::BackgroundColor(_) => true, - _ => false - } - } - - fn reset(&mut self) -> io::Result<()> { - self.foreground = self.def_foreground; - self.background = self.def_background; - self.apply(); - - Ok(()) - } - - fn get_ref<'a>(&'a self) -> &'a T { &self.buf } - - fn get_mut<'a>(&'a mut self) -> &'a mut T { &mut self.buf } -} - -impl UnwrappableTerminal for WinConsole { - fn unwrap(self) -> T { self.buf } -} From cfd7b849a90a2c7eb706f8fc47a476b7a0765dbf Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Fri, 5 Jun 2015 10:54:31 -0700 Subject: [PATCH 2/6] Squashed 'src/external/term/' content from commit 6822aed git-subtree-dir: src/external/term git-subtree-split: 6822aed1980f572f5d241c7bacf6971b5e2775ed --- .gitignore | 2 + .travis.yml | 25 ++ Cargo.toml | 21 + LICENSE-APACHE | 201 +++++++++ LICENSE-MIT | 25 ++ README.md | 25 ++ appveyor.yml | 11 + src/lib.rs | 247 +++++++++++ src/terminfo/mod.rs | 275 +++++++++++++ src/terminfo/parm.rs | 707 ++++++++++++++++++++++++++++++++ src/terminfo/parser/compiled.rs | 359 ++++++++++++++++ src/terminfo/searcher.rs | 79 ++++ src/win.rs | 274 +++++++++++++ 13 files changed, 2251 insertions(+) create mode 100644 .gitignore create mode 100644 .travis.yml create mode 100644 Cargo.toml create mode 100644 LICENSE-APACHE create mode 100644 LICENSE-MIT create mode 100644 README.md create mode 100644 appveyor.yml create mode 100644 src/lib.rs create mode 100644 src/terminfo/mod.rs create mode 100644 src/terminfo/parm.rs create mode 100644 src/terminfo/parser/compiled.rs create mode 100644 src/terminfo/searcher.rs create mode 100644 src/win.rs diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000000..4fffb2f89cbd8 --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +/target +/Cargo.lock diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000000000..3a2aef7a08409 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,25 @@ +language: rust +rust: + - 1.0.0 + - beta + - nightly +sudo: false +script: + - cargo build --verbose + - cargo test --verbose + - cargo doc +after_success: | + [ $TRAVIS_BRANCH = master ] && + [ $TRAVIS_PULL_REQUEST = 
false ] && + [ $TRAVIS_RUST_VERSION = nightly ] && + echo '' > target/doc/index.html && + pip install ghp-import --user $USER && + $HOME/.local/bin/ghp-import -n target/doc && + git push -qf https://${TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git gh-pages + +env: + global: + secure: Na+sfNSVDR8SdK5lFIbhvPj6zU+0NBTbHgugcCzhDEkvkaO0GLMYdhD3g+yAOR2DjIgn2Jdv1TGJkfD3KTPut52mmxftLkeA9528zqsiLD00JbPj8S71BcHIytOyGth84u+GdpNBZRvt5Fd3Pz1OgoEEeP/3O6uMe52+3Um/oPw= +notifications: + email: + on_success: never diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000000000..ef9f6a58bd778 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,21 @@ +[package] + +name = "term" +version = "0.2.7" +authors = ["The Rust Project Developers"] +license = "MIT/Apache-2.0" +readme = "README.md" +repository = "https://github.com/rust-lang/term" +homepage = "https://github.com/rust-lang/term" +documentation = "http://doc.rust-lang.org/term" +description = """ +A terminal formatting library +""" + +[target.i686-pc-windows-gnu.dependencies] +winapi = "0.1" +kernel32-sys = "0.1" + +[target.x86_64-pc-windows-gnu.dependencies] +winapi = "0.1" +kernel32-sys = "0.1" diff --git a/LICENSE-APACHE b/LICENSE-APACHE new file mode 100644 index 0000000000000..16fe87b06e802 --- /dev/null +++ b/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/LICENSE-MIT b/LICENSE-MIT new file mode 100644 index 0000000000000..39d4bdb5acd31 --- /dev/null +++ b/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) 2014 The Rust Project Developers + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 0000000000000..1453c197d5132 --- /dev/null +++ b/README.md @@ -0,0 +1,25 @@ +term +==== + +A Rust library for terminfo parsing and terminal colors. + +[![Build Status](https://travis-ci.org/rust-lang/term.svg?branch=master)](https://travis-ci.org/rust-lang/term) +[![Build status](https://ci.appveyor.com/api/projects/status/422c2ovagestqw89?svg=true)](https://ci.appveyor.com/project/alexcrichton/term) + +[Documentation](http://doc.rust-lang.org/term) + +## Usage + +Add this to your `Cargo.toml`: + +```toml +[dependencies] + +term = "*" +``` + +and this to your crate root: + +```rust +extern crate term; +``` diff --git a/appveyor.yml b/appveyor.yml new file mode 100644 index 0000000000000..f74c851ad7100 --- /dev/null +++ b/appveyor.yml @@ -0,0 +1,11 @@ +install: + - ps: Start-FileDownload 'https://static.rust-lang.org/dist/rust-nightly-i686-pc-windows-gnu.exe' + - rust-nightly-i686-pc-windows-gnu.exe /VERYSILENT /NORESTART /DIR="C:\Program Files (x86)\Rust" + - SET PATH=%PATH%;C:\Program Files (x86)\Rust\bin + - rustc -V + - cargo -V + +build: false + +test_script: + - cargo test --verbose diff --git a/src/lib.rs b/src/lib.rs new file mode 100644 index 0000000000000..ea4a10bcbaf96 --- /dev/null +++ b/src/lib.rs @@ -0,0 +1,247 @@ +// Copyright 2013-2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Terminal formatting library. +//! +//! This crate provides the `Terminal` trait, which abstracts over an [ANSI +//! Terminal][ansi] to provide color printing, among other things. There are two +//! implementations, the `TerminfoTerminal`, which uses control characters from +//! a [terminfo][ti] database, and `WinConsole`, which uses the [Win32 Console +//! API][win]. +//! +//! # Usage +//! +//! This crate is [on crates.io](https://crates.io/crates/term) and can be +//! used by adding `term` to the dependencies in your project's `Cargo.toml`. +//! +//! ```toml +//! [dependencies] +//! +//! term = "0.2" +//! ``` +//! +//! and this to your crate root: +//! +//! ```rust +//! extern crate term; +//! ``` +//! +//! # Examples +//! +//! ```no_run +//! extern crate term; +//! use std::io::prelude::*; +//! +//! fn main() { +//! let mut t = term::stdout().unwrap(); +//! +//! t.fg(term::color::GREEN).unwrap(); +//! (write!(t, "hello, ")).unwrap(); +//! +//! t.fg(term::color::RED).unwrap(); +//! (writeln!(t, "world!")).unwrap(); +//! +//! assert!(t.reset().unwrap()); +//! } +//! ``` +//! +//! [ansi]: https://en.wikipedia.org/wiki/ANSI_escape_code +//! [win]: http://msdn.microsoft.com/en-us/library/windows/desktop/ms682010%28v=vs.85%29.aspx +//! 
[ti]: https://en.wikipedia.org/wiki/Terminfo
+
+#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
+       html_favicon_url = "http://www.rust-lang.org/favicon.ico",
+       html_root_url = "http://doc.rust-lang.org/nightly/",
+       html_playground_url = "http://play.rust-lang.org/")]
+#![deny(missing_docs)]
+#![cfg_attr(test, deny(warnings))]
+#![cfg_attr(rust_build, feature(staged_api))]
+#![cfg_attr(rust_build, staged_api)]
+#![cfg_attr(rust_build,
+            unstable(feature = "rustc_private",
+                     reason = "use the crates.io `term` library instead"))]
+
+use std::io::prelude::*;
+
+pub use terminfo::TerminfoTerminal;
+#[cfg(windows)]
+pub use win::WinConsole;
+
+use std::io::{self, Stdout, Stderr};
+
+pub mod terminfo;
+
+#[cfg(windows)]
+mod win;
+
+/// Alias for stdout terminals.
+pub type StdoutTerminal = Terminal<Output=Stdout> + Send;
+/// Alias for stderr terminals.
+pub type StderrTerminal = Terminal<Output=Stderr> + Send;
+
+#[cfg(not(windows))]
+/// Return a Terminal wrapping stdout, or None if a terminal couldn't be
+/// opened.
+pub fn stdout() -> Option<Box<StdoutTerminal>> {
+    TerminfoTerminal::new(io::stdout()).map(|t| {
+        Box::new(t) as Box<StdoutTerminal>
+    })
+}
+
+#[cfg(windows)]
+/// Return a Terminal wrapping stdout, or None if a terminal couldn't be
+/// opened.
+pub fn stdout() -> Option<Box<StdoutTerminal>> {
+    TerminfoTerminal::new(io::stdout()).map(|t| {
+        Box::new(t) as Box<StdoutTerminal>
+    }).or_else(|| WinConsole::new(io::stdout()).ok().map(|t| {
+        Box::new(t) as Box<StdoutTerminal>
+    }))
+}
+
+#[cfg(not(windows))]
+/// Return a Terminal wrapping stderr, or None if a terminal couldn't be
+/// opened.
+pub fn stderr() -> Option<Box<StderrTerminal>> {
+    TerminfoTerminal::new(io::stderr()).map(|t| {
+        Box::new(t) as Box<StderrTerminal>
+    })
+}
+
+#[cfg(windows)]
+/// Return a Terminal wrapping stderr, or None if a terminal couldn't be
+/// opened.
+pub fn stderr() -> Option<Box<StderrTerminal>> {
+    TerminfoTerminal::new(io::stderr()).map(|t| {
+        Box::new(t) as Box<StderrTerminal>
+    }).or_else(|| WinConsole::new(io::stderr()).ok().map(|t| {
+        Box::new(t) as Box<StderrTerminal>
+    }))
+}
+
+
+/// Terminal color definitions
+pub mod color {
+    /// Number for a terminal color
+    pub type Color = u16;
+
+    pub const BLACK: Color = 0;
+    pub const RED: Color = 1;
+    pub const GREEN: Color = 2;
+    pub const YELLOW: Color = 3;
+    pub const BLUE: Color = 4;
+    pub const MAGENTA: Color = 5;
+    pub const CYAN: Color = 6;
+    pub const WHITE: Color = 7;
+
+    pub const BRIGHT_BLACK: Color = 8;
+    pub const BRIGHT_RED: Color = 9;
+    pub const BRIGHT_GREEN: Color = 10;
+    pub const BRIGHT_YELLOW: Color = 11;
+    pub const BRIGHT_BLUE: Color = 12;
+    pub const BRIGHT_MAGENTA: Color = 13;
+    pub const BRIGHT_CYAN: Color = 14;
+    pub const BRIGHT_WHITE: Color = 15;
+}
+
+/// Terminal attributes for use with term.attr().
+///
+/// Most attributes can only be turned on and must be turned off with term.reset().
+/// The ones that can be turned off explicitly take a boolean value.
+/// Color is also represented as an attribute for convenience.
+#[derive(Debug, PartialEq, Eq, Copy, Clone)]
+pub enum Attr {
+    /// Bold (or possibly bright) mode
+    Bold,
+    /// Dim mode, also called faint or half-bright. Often not supported
+    Dim,
+    /// Italics mode. Often not supported
+    Italic(bool),
+    /// Underline mode
+    Underline(bool),
+    /// Blink mode
+    Blink,
+    /// Standout mode. Often implemented as Reverse, sometimes coupled with Bold
+    Standout(bool),
+    /// Reverse mode, inverts the foreground and background colors
+    Reverse,
+    /// Secure mode, also called invis mode. Hides the printed text
+    Secure,
+    /// Convenience attribute to set the foreground color
+    ForegroundColor(color::Color),
+    /// Convenience attribute to set the background color
+    BackgroundColor(color::Color)
+}
+
+/// A terminal with similar capabilities to an ANSI Terminal
+/// (foreground/background colors etc).
+pub trait Terminal: Write {
+    /// The terminal's output writer type.
+    type Output: Write;
+
+    /// Sets the foreground color to the given color.
+    ///
+    /// If the color is a bright color, but the terminal only supports 8 colors,
+    /// the corresponding normal color will be used instead.
+    ///
+    /// Returns `Ok(true)` if the color was set, `Ok(false)` otherwise, and `Err(e)`
+    /// if there was an I/O error.
+    fn fg(&mut self, color: color::Color) -> io::Result<bool>;
+
+    /// Sets the background color to the given color.
+    ///
+    /// If the color is a bright color, but the terminal only supports 8 colors,
+    /// the corresponding normal color will be used instead.
+    ///
+    /// Returns `Ok(true)` if the color was set, `Ok(false)` otherwise, and `Err(e)`
+    /// if there was an I/O error.
+    fn bg(&mut self, color: color::Color) -> io::Result<bool>;
+
+    /// Sets the given terminal attribute, if supported. Returns `Ok(true)`
+    /// if the attribute was supported, `Ok(false)` otherwise, and `Err(e)` if
+    /// there was an I/O error.
+    fn attr(&mut self, attr: Attr) -> io::Result<bool>;
+
+    /// Returns whether the given terminal attribute is supported.
+    fn supports_attr(&self, attr: Attr) -> bool;
+
+    /// Resets all terminal attributes and color to the default.
+    ///
+    /// Returns `Ok(true)` if the terminal was reset, `Ok(false)` otherwise, and `Err(e)` if there
+    /// was an I/O error.
+    fn reset(&mut self) -> io::Result<bool>;
+
+    /// Moves the cursor up one line.
+    ///
+    /// Returns `Ok(true)` if the cursor was moved, `Ok(false)` otherwise, and `Err(e)`
+    /// if there was an I/O error.
+    fn cursor_up(&mut self) -> io::Result<bool>;
+
+    /// Deletes the text from the cursor location to the end of the line.
+    ///
+    /// Returns `Ok(true)` if the text was deleted, `Ok(false)` otherwise, and `Err(e)`
+    /// if there was an I/O error.
+    fn delete_line(&mut self) -> io::Result<bool>;
+
+    /// Moves the cursor to the left edge of the current line.
+    ///
+    /// Returns `Ok(true)` if the cursor was moved, `Ok(false)` otherwise, and `Err(e)`
+    /// if there was an I/O error.
+    fn carriage_return(&mut self) -> io::Result<bool>;
+
+    /// Gets an immutable reference to the stream inside
+    fn get_ref<'a>(&'a self) -> &'a Self::Output;
+
+    /// Gets a mutable reference to the stream inside
+    fn get_mut<'a>(&'a mut self) -> &'a mut Self::Output;
+
+    /// Returns the contained stream, destroying the `Terminal`
+    fn into_inner(self) -> Self::Output where Self: Sized;
+}
diff --git a/src/terminfo/mod.rs b/src/terminfo/mod.rs
new file mode 100644
index 0000000000000..36289fd32ee3d
--- /dev/null
+++ b/src/terminfo/mod.rs
@@ -0,0 +1,275 @@
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Terminfo database interface.
+
+use std::collections::HashMap;
+use std::env;
+use std::error;
+use std::fmt;
+use std::fs::File;
+use std::io::prelude::*;
+use std::io;
+use std::path::Path;
+
+use Attr;
+use color;
+use Terminal;
+use self::searcher::get_dbpath_for_term;
+use self::parser::compiled::{parse, msys_terminfo};
+use self::parm::{expand, Variables, Param};
+
+
+/// A parsed terminfo database entry.
+#[derive(Debug)]
+pub struct TermInfo {
+    /// Names for the terminal
+    pub names: Vec<String>,
+    /// Map of capability name to boolean value
+    pub bools: HashMap<String, bool>,
+    /// Map of capability name to numeric value
+    pub numbers: HashMap<String, u16>,
+    /// Map of capability name to raw (unexpanded) string
+    pub strings: HashMap<String, Vec<u8>>
+}
+
+/// A terminfo creation error.
+#[derive(Debug)]
+pub enum Error {
+    /// TermUnset Indicates that the environment doesn't include enough information to find
+    /// the terminfo entry.
+    TermUnset,
+    /// MalformedTerminfo indicates that parsing the terminfo entry failed.
+    MalformedTerminfo(String),
+    /// io::Error forwards any io::Errors encountered when finding or reading the terminfo entry.
+    IoError(io::Error),
+}
+
+impl error::Error for Error {
+    fn description(&self) -> &str { "failed to create TermInfo" }
+
+    fn cause(&self) -> Option<&error::Error> {
+        use self::Error::*;
+        match self {
+            &IoError(ref e) => Some(e),
+            _ => None,
+        }
+    }
+}
+
+impl fmt::Display for Error {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        use self::Error::*;
+        match self {
+            &TermUnset => Ok(()),
+            &MalformedTerminfo(ref e) => e.fmt(f),
+            &IoError(ref e) => e.fmt(f),
+        }
+    }
+}
+
+impl TermInfo {
+    /// Create a TermInfo based on current environment.
+    pub fn from_env() -> Result<TermInfo, Error> {
+        let term = match env::var("TERM") {
+            Ok(name) => TermInfo::from_name(&name),
+            Err(..) => return Err(Error::TermUnset),
+        };
+
+        if term.is_err() && env::var("MSYSCON").ok().map_or(false, |s| "mintty.exe" == s) {
+            // msys terminal
+            Ok(msys_terminfo())
+        } else {
+            term
+        }
+    }
+
+    /// Create a TermInfo for the named terminal.
+    pub fn from_name(name: &str) -> Result<TermInfo, Error> {
+        get_dbpath_for_term(name).ok_or_else(|| {
+            Error::IoError(io::Error::new(io::ErrorKind::NotFound,
+                                          "terminfo file not found"))
+        }).and_then(|p| {
+            TermInfo::from_path(&p)
+        })
+    }
+
+    /// Parse the given TermInfo.
+    pub fn from_path(path: &Path) -> Result<TermInfo, Error> {
+        File::open(path).map_err(|e| {
+            Error::IoError(e)
+        }).and_then(|ref mut file| {
+            parse(file, false).map_err(|e| {
+                Error::MalformedTerminfo(e)
+            })
+        })
+    }
+}
+
+pub mod searcher;
+
+/// TermInfo format parsing.
+pub mod parser {
+    //! ncurses-compatible compiled terminfo format parsing (term(5))
+    pub mod compiled;
+}
+pub mod parm;
+
+
+fn cap_for_attr(attr: Attr) -> &'static str {
+    match attr {
+        Attr::Bold => "bold",
+        Attr::Dim => "dim",
+        Attr::Italic(true) => "sitm",
+        Attr::Italic(false) => "ritm",
+        Attr::Underline(true) => "smul",
+        Attr::Underline(false) => "rmul",
+        Attr::Blink => "blink",
+        Attr::Standout(true) => "smso",
+        Attr::Standout(false) => "rmso",
+        Attr::Reverse => "rev",
+        Attr::Secure => "invis",
+        Attr::ForegroundColor(_) => "setaf",
+        Attr::BackgroundColor(_) => "setab"
+    }
+}
+
+/// A Terminal that knows how many colors it supports, with a reference to its
+/// parsed Terminfo database record.
+pub struct TerminfoTerminal { + num_colors: u16, + out: T, + ti: TermInfo, +} + +impl Terminal for TerminfoTerminal { + type Output = T; + fn fg(&mut self, color: color::Color) -> io::Result { + let color = self.dim_if_necessary(color); + if self.num_colors > color { + return self.apply_cap("setaf", &[Param::Number(color as i16)]); + } + Ok(false) + } + + fn bg(&mut self, color: color::Color) -> io::Result { + let color = self.dim_if_necessary(color); + if self.num_colors > color { + return self.apply_cap("setab", &[Param::Number(color as i16)]); + } + Ok(false) + } + + fn attr(&mut self, attr: Attr) -> io::Result { + match attr { + Attr::ForegroundColor(c) => self.fg(c), + Attr::BackgroundColor(c) => self.bg(c), + _ => self.apply_cap(cap_for_attr(attr), &[]), + } + } + + fn supports_attr(&self, attr: Attr) -> bool { + match attr { + Attr::ForegroundColor(_) | Attr::BackgroundColor(_) => { + self.num_colors > 0 + } + _ => { + let cap = cap_for_attr(attr); + self.ti.strings.get(cap).is_some() + } + } + } + + fn reset(&mut self) -> io::Result { + // are there any terminals that have color/attrs and not sgr0? + // Try falling back to sgr, then op + let cmd = match [ + "sg0", "sgr", "op" + ].iter().filter_map(|cap| { + self.ti.strings.get(*cap) + }).next() { + Some(op) => match expand(&op, &[], &mut Variables::new()) { + Ok(cmd) => cmd, + Err(_) => return Ok(false), + }, + None => return Ok(false), + }; + + self.out.write_all(&cmd).map(|_|true) + } + + fn cursor_up(&mut self) -> io::Result { + self.apply_cap("cuu1", &[]) + } + + fn delete_line(&mut self) -> io::Result { + self.apply_cap("dl", &[]) + } + + fn carriage_return(&mut self) -> io::Result { + self.apply_cap("cr", &[]) + } + + fn get_ref<'a>(&'a self) -> &'a T { &self.out } + + fn get_mut<'a>(&'a mut self) -> &'a mut T { &mut self.out } + + fn into_inner(self) -> T where Self: Sized { self.out } +} + +impl TerminfoTerminal { + /// Create a new TerminfoTerminal with the given TermInfo and Write. + pub fn new_with_terminfo(out: T, terminfo: TermInfo) -> TerminfoTerminal { + let nc = if terminfo.strings.contains_key("setaf") + && terminfo.strings.contains_key("setab") { + terminfo.numbers.get("colors").map_or(0, |&n| n) + } else { 0 }; + + TerminfoTerminal { + out: out, + ti: terminfo, + num_colors: nc, + } + } + + /// Create a new TerminfoTerminal for the current environment with the given Write. + /// + /// Returns `None` when the terminfo cannot be found or parsed. + pub fn new(out: T) -> Option> { + TermInfo::from_env().map(move |ti| TerminfoTerminal::new_with_terminfo(out, ti)).ok() + } + + fn dim_if_necessary(&self, color: color::Color) -> color::Color { + if color >= self.num_colors && color >= 8 && color < 16 { + color-8 + } else { color } + } + + fn apply_cap(&mut self, cmd: &str, params: &[Param]) -> io::Result { + if let Some(cmd) = self.ti.strings.get(cmd) { + if let Ok(s) = expand(&cmd, params, &mut Variables::new()) { + try!(self.out.write_all(&s)); + return Ok(true) + } + } + Ok(false) + } +} + + +impl Write for TerminfoTerminal { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.out.write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + self.out.flush() + } +} diff --git a/src/terminfo/parm.rs b/src/terminfo/parm.rs new file mode 100644 index 0000000000000..17061113397fc --- /dev/null +++ b/src/terminfo/parm.rs @@ -0,0 +1,707 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. 
+// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Parameterized string expansion + +use self::Param::*; +use self::States::*; +use self::FormatState::*; +use self::FormatOp::*; + +use std::ascii::AsciiExt; +use std::iter::repeat; +use std::mem::replace; + +#[derive(Clone, Copy, PartialEq)] +enum States { + Nothing, + Percent, + SetVar, + GetVar, + PushParam, + CharConstant, + CharClose, + IntConstant(i16), + FormatPattern(Flags, FormatState), + SeekIfElse(usize), + SeekIfElsePercent(usize), + SeekIfEnd(usize), + SeekIfEndPercent(usize) +} + +#[derive(Copy, PartialEq, Clone)] +enum FormatState { + FormatStateFlags, + FormatStateWidth, + FormatStatePrecision +} + +/// Types of parameters a capability can use +#[allow(missing_docs)] +#[derive(Clone)] +pub enum Param { + Words(String), + Number(i16) +} + +/// Container for static and dynamic variable arrays +pub struct Variables { + /// Static variables A-Z + sta: [Param; 26], + /// Dynamic variables a-z + dyn: [Param; 26] +} + +impl Variables { + /// Return a new zero-initialized Variables + pub fn new() -> Variables { + Variables { + sta: [ + Number(0), Number(0), Number(0), Number(0), Number(0), + Number(0), Number(0), Number(0), Number(0), Number(0), + Number(0), Number(0), Number(0), Number(0), Number(0), + Number(0), Number(0), Number(0), Number(0), Number(0), + Number(0), Number(0), Number(0), Number(0), Number(0), + Number(0), + ], + dyn: [ + Number(0), Number(0), Number(0), Number(0), Number(0), + Number(0), Number(0), Number(0), Number(0), Number(0), + Number(0), Number(0), Number(0), Number(0), Number(0), + Number(0), Number(0), Number(0), Number(0), Number(0), + Number(0), Number(0), Number(0), Number(0), Number(0), + Number(0), + ], + } + } +} + +/// Expand a parameterized capability +/// +/// # Arguments +/// * `cap` - string to expand +/// * `params` - vector of params for %p1 etc +/// * `vars` - Variables struct for %Pa etc +/// +/// To be compatible with ncurses, `vars` should be the same between calls to `expand` for +/// multiple capabilities for the same terminal. 
+pub fn expand(cap: &[u8], params: &[Param], vars: &mut Variables) + -> Result , String> { + let mut state = Nothing; + + // expanded cap will only rarely be larger than the cap itself + let mut output = Vec::with_capacity(cap.len()); + + let mut stack: Vec = Vec::new(); + + // Copy parameters into a local vector for mutability + let mut mparams = [ + Number(0), Number(0), Number(0), Number(0), Number(0), + Number(0), Number(0), Number(0), Number(0), + ]; + for (dst, src) in mparams.iter_mut().zip(params.iter()) { + *dst = (*src).clone(); + } + + for &c in cap.iter() { + let cur = c as char; + let mut old_state = state; + match state { + Nothing => { + if cur == '%' { + state = Percent; + } else { + output.push(c); + } + }, + Percent => { + match cur { + '%' => { output.push(c); state = Nothing }, + 'c' => if stack.len() > 0 { + match stack.pop().unwrap() { + // if c is 0, use 0200 (128) for ncurses compatibility + Number(c) => { + output.push(if c == 0 { + 128u8 + } else { + c as u8 + }) + } + _ => return Err("a non-char was used with %c".to_string()) + } + } else { return Err("stack is empty".to_string()) }, + 'p' => state = PushParam, + 'P' => state = SetVar, + 'g' => state = GetVar, + '\'' => state = CharConstant, + '{' => state = IntConstant(0), + 'l' => if stack.len() > 0 { + match stack.pop().unwrap() { + Words(s) => stack.push(Number(s.len() as i16)), + _ => return Err("a non-str was used with %l".to_string()) + } + } else { return Err("stack is empty".to_string()) }, + '+' => if stack.len() > 1 { + match (stack.pop().unwrap(), stack.pop().unwrap()) { + (Number(y), Number(x)) => stack.push(Number(x + y)), + _ => return Err("non-numbers on stack with +".to_string()) + } + } else { return Err("stack is empty".to_string()) }, + '-' => if stack.len() > 1 { + match (stack.pop().unwrap(), stack.pop().unwrap()) { + (Number(y), Number(x)) => stack.push(Number(x - y)), + _ => return Err("non-numbers on stack with -".to_string()) + } + } else { return Err("stack is empty".to_string()) }, + '*' => if stack.len() > 1 { + match (stack.pop().unwrap(), stack.pop().unwrap()) { + (Number(y), Number(x)) => stack.push(Number(x * y)), + _ => return Err("non-numbers on stack with *".to_string()) + } + } else { return Err("stack is empty".to_string()) }, + '/' => if stack.len() > 1 { + match (stack.pop().unwrap(), stack.pop().unwrap()) { + (Number(y), Number(x)) => stack.push(Number(x / y)), + _ => return Err("non-numbers on stack with /".to_string()) + } + } else { return Err("stack is empty".to_string()) }, + 'm' => if stack.len() > 1 { + match (stack.pop().unwrap(), stack.pop().unwrap()) { + (Number(y), Number(x)) => stack.push(Number(x % y)), + _ => return Err("non-numbers on stack with %".to_string()) + } + } else { return Err("stack is empty".to_string()) }, + '&' => if stack.len() > 1 { + match (stack.pop().unwrap(), stack.pop().unwrap()) { + (Number(y), Number(x)) => stack.push(Number(x & y)), + _ => return Err("non-numbers on stack with &".to_string()) + } + } else { return Err("stack is empty".to_string()) }, + '|' => if stack.len() > 1 { + match (stack.pop().unwrap(), stack.pop().unwrap()) { + (Number(y), Number(x)) => stack.push(Number(x | y)), + _ => return Err("non-numbers on stack with |".to_string()) + } + } else { return Err("stack is empty".to_string()) }, + '^' => if stack.len() > 1 { + match (stack.pop().unwrap(), stack.pop().unwrap()) { + (Number(y), Number(x)) => stack.push(Number(x ^ y)), + _ => return Err("non-numbers on stack with ^".to_string()) + } + } else { return Err("stack 
is empty".to_string()) }, + '=' => if stack.len() > 1 { + match (stack.pop().unwrap(), stack.pop().unwrap()) { + (Number(y), Number(x)) => stack.push(Number(if x == y { 1 } + else { 0 })), + _ => return Err("non-numbers on stack with =".to_string()) + } + } else { return Err("stack is empty".to_string()) }, + '>' => if stack.len() > 1 { + match (stack.pop().unwrap(), stack.pop().unwrap()) { + (Number(y), Number(x)) => stack.push(Number(if x > y { 1 } + else { 0 })), + _ => return Err("non-numbers on stack with >".to_string()) + } + } else { return Err("stack is empty".to_string()) }, + '<' => if stack.len() > 1 { + match (stack.pop().unwrap(), stack.pop().unwrap()) { + (Number(y), Number(x)) => stack.push(Number(if x < y { 1 } + else { 0 })), + _ => return Err("non-numbers on stack with <".to_string()) + } + } else { return Err("stack is empty".to_string()) }, + 'A' => if stack.len() > 1 { + match (stack.pop().unwrap(), stack.pop().unwrap()) { + (Number(0), Number(_)) => stack.push(Number(0)), + (Number(_), Number(0)) => stack.push(Number(0)), + (Number(_), Number(_)) => stack.push(Number(1)), + _ => return Err("non-numbers on stack with logical and".to_string()) + } + } else { return Err("stack is empty".to_string()) }, + 'O' => if stack.len() > 1 { + match (stack.pop().unwrap(), stack.pop().unwrap()) { + (Number(0), Number(0)) => stack.push(Number(0)), + (Number(_), Number(_)) => stack.push(Number(1)), + _ => return Err("non-numbers on stack with logical or".to_string()) + } + } else { return Err("stack is empty".to_string()) }, + '!' => if stack.len() > 0 { + match stack.pop().unwrap() { + Number(0) => stack.push(Number(1)), + Number(_) => stack.push(Number(0)), + _ => return Err("non-number on stack with logical not".to_string()) + } + } else { return Err("stack is empty".to_string()) }, + '~' => if stack.len() > 0 { + match stack.pop().unwrap() { + Number(x) => stack.push(Number(!x)), + _ => return Err("non-number on stack with %~".to_string()) + } + } else { return Err("stack is empty".to_string()) }, + 'i' => match (mparams[0].clone(), mparams[1].clone()) { + (Number(x), Number(y)) => { + mparams[0] = Number(x+1); + mparams[1] = Number(y+1); + }, + (_, _) => return Err("first two params not numbers with %i".to_string()) + }, + + // printf-style support for %doxXs + 'd'|'o'|'x'|'X'|'s' => if stack.len() > 0 { + let flags = Flags::new(); + let res = format(stack.pop().unwrap(), FormatOp::from_char(cur), flags); + if res.is_err() { return res } + output.extend(res.unwrap().iter().map(|x| *x)) + } else { return Err("stack is empty".to_string()) }, + ':'|'#'|' '|'.'|'0'...'9' => { + let mut flags = Flags::new(); + let mut fstate = FormatStateFlags; + match cur { + ':' => (), + '#' => flags.alternate = true, + ' ' => flags.space = true, + '.' => fstate = FormatStatePrecision, + '0'...'9' => { + flags.width = cur as usize - '0' as usize; + fstate = FormatStateWidth; + } + _ => unreachable!() + } + state = FormatPattern(flags, fstate); + } + + // conditionals + '?' 
=> (), + 't' => if stack.len() > 0 { + match stack.pop().unwrap() { + Number(0) => state = SeekIfElse(0), + Number(_) => (), + _ => return Err("non-number on stack \ + with conditional".to_string()) + } + } else { return Err("stack is empty".to_string()) }, + 'e' => state = SeekIfEnd(0), + ';' => (), + + _ => { + return Err(format!("unrecognized format option {}", cur)) + } + } + }, + PushParam => { + // params are 1-indexed + stack.push(mparams[match cur.to_digit(10) { + Some(d) => d as usize - 1, + None => return Err("bad param number".to_string()) + }].clone()); + }, + SetVar => { + if cur >= 'A' && cur <= 'Z' { + if stack.len() > 0 { + let idx = (cur as u8) - b'A'; + vars.sta[idx as usize] = stack.pop().unwrap(); + } else { return Err("stack is empty".to_string()) } + } else if cur >= 'a' && cur <= 'z' { + if stack.len() > 0 { + let idx = (cur as u8) - b'a'; + vars.dyn[idx as usize] = stack.pop().unwrap(); + } else { return Err("stack is empty".to_string()) } + } else { + return Err("bad variable name in %P".to_string()); + } + }, + GetVar => { + if cur >= 'A' && cur <= 'Z' { + let idx = (cur as u8) - b'A'; + stack.push(vars.sta[idx as usize].clone()); + } else if cur >= 'a' && cur <= 'z' { + let idx = (cur as u8) - b'a'; + stack.push(vars.dyn[idx as usize].clone()); + } else { + return Err("bad variable name in %g".to_string()); + } + }, + CharConstant => { + stack.push(Number(c as i16)); + state = CharClose; + }, + CharClose => { + if cur != '\'' { + return Err("malformed character constant".to_string()); + } + }, + IntConstant(i) => { + if cur == '}' { + stack.push(Number(i)); + state = Nothing; + } else if let Some(digit) = cur.to_digit(10) { + match i.checked_mul(10).and_then(|i_ten|i_ten.checked_add(digit as i16)) { + Some(i) => { + state = IntConstant(i); + old_state = Nothing; + } + None => return Err("int constant too large".to_string()) + } + } else { + return Err("bad int constant".to_string()); + } + } + FormatPattern(ref mut flags, ref mut fstate) => { + old_state = Nothing; + match (*fstate, cur) { + (_,'d')|(_,'o')|(_,'x')|(_,'X')|(_,'s') => if stack.len() > 0 { + let res = format(stack.pop().unwrap(), FormatOp::from_char(cur), *flags); + if res.is_err() { return res } + output.extend(res.unwrap().iter().map(|x| *x)); + // will cause state to go to Nothing + old_state = FormatPattern(*flags, *fstate); + } else { return Err("stack is empty".to_string()) }, + (FormatStateFlags,'#') => { + flags.alternate = true; + } + (FormatStateFlags,'-') => { + flags.left = true; + } + (FormatStateFlags,'+') => { + flags.sign = true; + } + (FormatStateFlags,' ') => { + flags.space = true; + } + (FormatStateFlags,'0'...'9') => { + flags.width = cur as usize - '0' as usize; + *fstate = FormatStateWidth; + } + (FormatStateFlags,'.') => { + *fstate = FormatStatePrecision; + } + (FormatStateWidth,'0'...'9') => { + let old = flags.width; + flags.width = flags.width * 10 + (cur as usize - '0' as usize); + if flags.width < old { return Err("format width overflow".to_string()) } + } + (FormatStateWidth,'.') => { + *fstate = FormatStatePrecision; + } + (FormatStatePrecision,'0'...'9') => { + let old = flags.precision; + flags.precision = flags.precision * 10 + (cur as usize - '0' as usize); + if flags.precision < old { + return Err("format precision overflow".to_string()) + } + } + _ => return Err("invalid format specifier".to_string()) + } + } + SeekIfElse(level) => { + if cur == '%' { + state = SeekIfElsePercent(level); + } + old_state = Nothing; + } + SeekIfElsePercent(level) => { + if cur == 
';' { + if level == 0 { + state = Nothing; + } else { + state = SeekIfElse(level-1); + } + } else if cur == 'e' && level == 0 { + state = Nothing; + } else if cur == '?' { + state = SeekIfElse(level+1); + } else { + state = SeekIfElse(level); + } + } + SeekIfEnd(level) => { + if cur == '%' { + state = SeekIfEndPercent(level); + } + old_state = Nothing; + } + SeekIfEndPercent(level) => { + if cur == ';' { + if level == 0 { + state = Nothing; + } else { + state = SeekIfEnd(level-1); + } + } else if cur == '?' { + state = SeekIfEnd(level+1); + } else { + state = SeekIfEnd(level); + } + } + } + if state == old_state { + state = Nothing; + } + } + Ok(output) +} + +#[derive(Copy, PartialEq, Clone)] +struct Flags { + width: usize, + precision: usize, + alternate: bool, + left: bool, + sign: bool, + space: bool +} + +impl Flags { + fn new() -> Flags { + Flags{ width: 0, precision: 0, alternate: false, + left: false, sign: false, space: false } + } +} + +#[derive(Copy, Clone)] +enum FormatOp { + FormatDigit, + FormatOctal, + FormatHex, + FormatHEX, + FormatString +} + +impl FormatOp { + fn from_char(c: char) -> FormatOp { + match c { + 'd' => FormatDigit, + 'o' => FormatOctal, + 'x' => FormatHex, + 'X' => FormatHEX, + 's' => FormatString, + _ => panic!("bad FormatOp char") + } + } + fn to_char(self) -> char { + match self { + FormatDigit => 'd', + FormatOctal => 'o', + FormatHex => 'x', + FormatHEX => 'X', + FormatString => 's' + } + } +} + +fn format(val: Param, op: FormatOp, flags: Flags) -> Result ,String> { + let mut s = match val { + Number(d) => { + let s = match (op, flags.sign) { + (FormatDigit, true) => format!("{:+}", d), + (FormatDigit, false) => format!("{}", d), + (FormatOctal, _) => format!("{:o}", d), + (FormatHex, _) => format!("{:x}", d), + (FormatHEX, _) => format!("{:X}", d), + (FormatString, _) => return Err("non-number on stack with %s".to_string()) + }; + + let mut s: Vec = s.into_bytes().into_iter().collect(); + if flags.precision > s.len() { + let mut s_ = Vec::with_capacity(flags.precision); + let n = flags.precision - s.len(); + s_.extend(repeat(b'0').take(n)); + s_.extend(s.into_iter()); + s = s_; + } + assert!(!s.is_empty(), "string conversion produced empty result"); + match op { + FormatDigit => { + if flags.space && !(s[0] == b'-' || s[0] == b'+' ) { + s.insert(0, b' '); + } + } + FormatOctal => { + if flags.alternate && s[0] != b'0' { + s.insert(0, b'0'); + } + } + FormatHex => { + if flags.alternate { + let s_ = replace(&mut s, vec!(b'0', b'x')); + s.extend(s_.into_iter()); + } + } + FormatHEX => { + s = s.to_ascii_uppercase(); + if flags.alternate { + let s_ = replace(&mut s, vec!(b'0', b'X')); + s.extend(s_.into_iter()); + } + } + FormatString => unreachable!() + } + s + } + Words(s) => { + match op { + FormatString => { + let mut s = s.as_bytes().to_vec(); + if flags.precision > 0 && flags.precision < s.len() { + s.truncate(flags.precision); + } + s + } + _ => { + return Err(format!("non-string on stack with %{}", + op.to_char())) + } + } + } + }; + if flags.width > s.len() { + let n = flags.width - s.len(); + if flags.left { + s.extend(repeat(b' ').take(n)); + } else { + let mut s_ = Vec::with_capacity(flags.width); + s_.extend(repeat(b' ').take(n)); + s_.extend(s.into_iter()); + s = s_; + } + } + Ok(s) +} + +#[cfg(test)] +mod test { + use super::{expand, Variables}; + use super::Param::{self, Words, Number}; + use std::result::Result::Ok; + + #[test] + fn test_basic_setabf() { + let s = b"\\E[48;5;%p1%dm"; + assert_eq!(expand(s, &[Number(1)], &mut 
Variables::new()).unwrap(), + "\\E[48;5;1m".bytes().collect::>()); + } + + #[test] + fn test_multiple_int_constants() { + assert_eq!(expand(b"%{1}%{2}%d%d", &[], &mut Variables::new()).unwrap(), + "21".bytes().collect::>()); + } + + #[test] + fn test_op_i() { + let mut vars = Variables::new(); + assert_eq!(expand(b"%p1%d%p2%d%p3%d%i%p1%d%p2%d%p3%d", + &[Number(1),Number(2),Number(3)], &mut vars), + Ok("123233".bytes().collect::>())); + assert_eq!(expand(b"%p1%d%p2%d%i%p1%d%p2%d", &[], &mut vars), + Ok("0011".bytes().collect::>())); + } + + #[test] + fn test_param_stack_failure_conditions() { + let mut varstruct = Variables::new(); + let vars = &mut varstruct; + fn get_res(fmt: &str, cap: &str, params: &[Param], vars: &mut Variables) -> + Result, String> + { + let mut u8v: Vec<_> = fmt.bytes().collect(); + u8v.extend(cap.as_bytes().iter().map(|&b| b)); + expand(&u8v, params, vars) + } + + let caps = ["%d", "%c", "%s", "%Pa", "%l", "%!", "%~"]; + for &cap in caps.iter() { + let res = get_res("", cap, &[], vars); + assert!(res.is_err(), + "Op {} succeeded incorrectly with 0 stack entries", cap); + let p = if cap == "%s" || cap == "%l" { + Words("foo".to_string()) + } else { + Number(97) + }; + let res = get_res("%p1", cap, &[p], vars); + assert!(res.is_ok(), + "Op {} failed with 1 stack entry: {}", cap, res.err().unwrap()); + } + let caps = ["%+", "%-", "%*", "%/", "%m", "%&", "%|", "%A", "%O"]; + for &cap in caps.iter() { + let res = expand(cap.as_bytes(), &[], vars); + assert!(res.is_err(), + "Binop {} succeeded incorrectly with 0 stack entries", cap); + let res = get_res("%{1}", cap, &[], vars); + assert!(res.is_err(), + "Binop {} succeeded incorrectly with 1 stack entry", cap); + let res = get_res("%{1}%{2}", cap, &[], vars); + assert!(res.is_ok(), + "Binop {} failed with 2 stack entries: {}", cap, res.err().unwrap()); + } + } + + #[test] + fn test_push_bad_param() { + assert!(expand(b"%pa", &[], &mut Variables::new()).is_err()); + } + + #[test] + fn test_comparison_ops() { + let v = [('<', [1u8, 0u8, 0u8]), ('=', [0u8, 1u8, 0u8]), ('>', [0u8, 0u8, 1u8])]; + for &(op, bs) in v.iter() { + let s = format!("%{{1}}%{{2}}%{}%d", op); + let res = expand(s.as_bytes(), &[], &mut Variables::new()); + assert!(res.is_ok(), res.err().unwrap()); + assert_eq!(res.unwrap(), vec!(b'0' + bs[0])); + let s = format!("%{{1}}%{{1}}%{}%d", op); + let res = expand(s.as_bytes(), &[], &mut Variables::new()); + assert!(res.is_ok(), res.err().unwrap()); + assert_eq!(res.unwrap(), vec!(b'0' + bs[1])); + let s = format!("%{{2}}%{{1}}%{}%d", op); + let res = expand(s.as_bytes(), &[], &mut Variables::new()); + assert!(res.is_ok(), res.err().unwrap()); + assert_eq!(res.unwrap(), vec!(b'0' + bs[2])); + } + } + + #[test] + fn test_conditionals() { + let mut vars = Variables::new(); + let s = b"\\E[%?%p1%{8}%<%t3%p1%d%e%p1%{16}%<%t9%p1%{8}%-%d%e38;5;%p1%d%;m"; + let res = expand(s, &[Number(1)], &mut vars); + assert!(res.is_ok(), res.err().unwrap()); + assert_eq!(res.unwrap(), + "\\E[31m".bytes().collect::>()); + let res = expand(s, &[Number(8)], &mut vars); + assert!(res.is_ok(), res.err().unwrap()); + assert_eq!(res.unwrap(), + "\\E[90m".bytes().collect::>()); + let res = expand(s, &[Number(42)], &mut vars); + assert!(res.is_ok(), res.err().unwrap()); + assert_eq!(res.unwrap(), + "\\E[38;5;42m".bytes().collect::>()); + } + + #[test] + fn test_format() { + let mut varstruct = Variables::new(); + let vars = &mut varstruct; + assert_eq!(expand(b"%p1%s%p2%2s%p3%2s%p4%.2s", + &[Words("foo".to_string()), + 
Words("foo".to_string()), + Words("f".to_string()), + Words("foo".to_string())], vars), + Ok("foofoo ffo".bytes().collect::>())); + assert_eq!(expand(b"%p1%:-4.2s", &[Words("foo".to_string())], vars), + Ok("fo ".bytes().collect::>())); + + assert_eq!(expand(b"%p1%d%p1%.3d%p1%5d%p1%:+d", &[Number(1)], vars), + Ok("1001 1+1".bytes().collect::>())); + assert_eq!(expand(b"%p1%o%p1%#o%p2%6.4x%p2%#6.4X", &[Number(15), Number(27)], vars), + Ok("17017 001b0X001B".bytes().collect::>())); + } +} diff --git a/src/terminfo/parser/compiled.rs b/src/terminfo/parser/compiled.rs new file mode 100644 index 0000000000000..2cf4a4ecb1897 --- /dev/null +++ b/src/terminfo/parser/compiled.rs @@ -0,0 +1,359 @@ +// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![allow(non_upper_case_globals)] + +//! ncurses-compatible compiled terminfo format parsing (term(5)) + +use std::collections::HashMap; +use std::io::prelude::*; +use std::io; +use super::super::TermInfo; + +// These are the orders ncurses uses in its compiled format (as of 5.9). Not sure if portable. + +pub static boolfnames: &'static[&'static str] = &["auto_left_margin", "auto_right_margin", + "no_esc_ctlc", "ceol_standout_glitch", "eat_newline_glitch", "erase_overstrike", "generic_type", + "hard_copy", "has_meta_key", "has_status_line", "insert_null_glitch", "memory_above", + "memory_below", "move_insert_mode", "move_standout_mode", "over_strike", "status_line_esc_ok", + "dest_tabs_magic_smso", "tilde_glitch", "transparent_underline", "xon_xoff", "needs_xon_xoff", + "prtr_silent", "hard_cursor", "non_rev_rmcup", "no_pad_char", "non_dest_scroll_region", + "can_change", "back_color_erase", "hue_lightness_saturation", "col_addr_glitch", + "cr_cancels_micro_mode", "has_print_wheel", "row_addr_glitch", "semi_auto_right_margin", + "cpi_changes_res", "lpi_changes_res", "backspaces_with_bs", "crt_no_scrolling", + "no_correctly_working_cr", "gnu_has_meta_key", "linefeed_is_newline", "has_hardware_tabs", + "return_does_clr_eol"]; + +pub static boolnames: &'static[&'static str] = &["bw", "am", "xsb", "xhp", "xenl", "eo", + "gn", "hc", "km", "hs", "in", "db", "da", "mir", "msgr", "os", "eslok", "xt", "hz", "ul", "xon", + "nxon", "mc5i", "chts", "nrrmc", "npc", "ndscr", "ccc", "bce", "hls", "xhpa", "crxm", "daisy", + "xvpa", "sam", "cpix", "lpix", "OTbs", "OTns", "OTnc", "OTMT", "OTNL", "OTpt", "OTxr"]; + +pub static numfnames: &'static[&'static str] = &[ "columns", "init_tabs", "lines", + "lines_of_memory", "magic_cookie_glitch", "padding_baud_rate", "virtual_terminal", + "width_status_line", "num_labels", "label_height", "label_width", "max_attributes", + "maximum_windows", "max_colors", "max_pairs", "no_color_video", "buffer_capacity", + "dot_vert_spacing", "dot_horz_spacing", "max_micro_address", "max_micro_jump", "micro_col_size", + "micro_line_size", "number_of_pins", "output_res_char", "output_res_line", + "output_res_horz_inch", "output_res_vert_inch", "print_rate", "wide_char_size", "buttons", + "bit_image_entwining", "bit_image_type", "magic_cookie_glitch_ul", "carriage_return_delay", + "new_line_delay", "backspace_delay", "horizontal_tab_delay", "number_of_function_keys"]; + +pub static numnames: &'static[&'static str] = &[ "cols", "it", "lines", "lm", 
"xmc", "pb", + "vt", "wsl", "nlab", "lh", "lw", "ma", "wnum", "colors", "pairs", "ncv", "bufsz", "spinv", + "spinh", "maddr", "mjump", "mcs", "mls", "npins", "orc", "orl", "orhi", "orvi", "cps", "widcs", + "btns", "bitwin", "bitype", "UTug", "OTdC", "OTdN", "OTdB", "OTdT", "OTkn"]; + +pub static stringfnames: &'static[&'static str] = &[ "back_tab", "bell", "carriage_return", + "change_scroll_region", "clear_all_tabs", "clear_screen", "clr_eol", "clr_eos", + "column_address", "command_character", "cursor_address", "cursor_down", "cursor_home", + "cursor_invisible", "cursor_left", "cursor_mem_address", "cursor_normal", "cursor_right", + "cursor_to_ll", "cursor_up", "cursor_visible", "delete_character", "delete_line", + "dis_status_line", "down_half_line", "enter_alt_charset_mode", "enter_blink_mode", + "enter_bold_mode", "enter_ca_mode", "enter_delete_mode", "enter_dim_mode", "enter_insert_mode", + "enter_secure_mode", "enter_protected_mode", "enter_reverse_mode", "enter_standout_mode", + "enter_underline_mode", "erase_chars", "exit_alt_charset_mode", "exit_attribute_mode", + "exit_ca_mode", "exit_delete_mode", "exit_insert_mode", "exit_standout_mode", + "exit_underline_mode", "flash_screen", "form_feed", "from_status_line", "init_1string", + "init_2string", "init_3string", "init_file", "insert_character", "insert_line", + "insert_padding", "key_backspace", "key_catab", "key_clear", "key_ctab", "key_dc", "key_dl", + "key_down", "key_eic", "key_eol", "key_eos", "key_f0", "key_f1", "key_f10", "key_f2", "key_f3", + "key_f4", "key_f5", "key_f6", "key_f7", "key_f8", "key_f9", "key_home", "key_ic", "key_il", + "key_left", "key_ll", "key_npage", "key_ppage", "key_right", "key_sf", "key_sr", "key_stab", + "key_up", "keypad_local", "keypad_xmit", "lab_f0", "lab_f1", "lab_f10", "lab_f2", "lab_f3", + "lab_f4", "lab_f5", "lab_f6", "lab_f7", "lab_f8", "lab_f9", "meta_off", "meta_on", "newline", + "pad_char", "parm_dch", "parm_delete_line", "parm_down_cursor", "parm_ich", "parm_index", + "parm_insert_line", "parm_left_cursor", "parm_right_cursor", "parm_rindex", "parm_up_cursor", + "pkey_key", "pkey_local", "pkey_xmit", "print_screen", "prtr_off", "prtr_on", "repeat_char", + "reset_1string", "reset_2string", "reset_3string", "reset_file", "restore_cursor", + "row_address", "save_cursor", "scroll_forward", "scroll_reverse", "set_attributes", "set_tab", + "set_window", "tab", "to_status_line", "underline_char", "up_half_line", "init_prog", "key_a1", + "key_a3", "key_b2", "key_c1", "key_c3", "prtr_non", "char_padding", "acs_chars", "plab_norm", + "key_btab", "enter_xon_mode", "exit_xon_mode", "enter_am_mode", "exit_am_mode", "xon_character", + "xoff_character", "ena_acs", "label_on", "label_off", "key_beg", "key_cancel", "key_close", + "key_command", "key_copy", "key_create", "key_end", "key_enter", "key_exit", "key_find", + "key_help", "key_mark", "key_message", "key_move", "key_next", "key_open", "key_options", + "key_previous", "key_print", "key_redo", "key_reference", "key_refresh", "key_replace", + "key_restart", "key_resume", "key_save", "key_suspend", "key_undo", "key_sbeg", "key_scancel", + "key_scommand", "key_scopy", "key_screate", "key_sdc", "key_sdl", "key_select", "key_send", + "key_seol", "key_sexit", "key_sfind", "key_shelp", "key_shome", "key_sic", "key_sleft", + "key_smessage", "key_smove", "key_snext", "key_soptions", "key_sprevious", "key_sprint", + "key_sredo", "key_sreplace", "key_sright", "key_srsume", "key_ssave", "key_ssuspend", + "key_sundo", "req_for_input", "key_f11", "key_f12", 
"key_f13", "key_f14", "key_f15", "key_f16", + "key_f17", "key_f18", "key_f19", "key_f20", "key_f21", "key_f22", "key_f23", "key_f24", + "key_f25", "key_f26", "key_f27", "key_f28", "key_f29", "key_f30", "key_f31", "key_f32", + "key_f33", "key_f34", "key_f35", "key_f36", "key_f37", "key_f38", "key_f39", "key_f40", + "key_f41", "key_f42", "key_f43", "key_f44", "key_f45", "key_f46", "key_f47", "key_f48", + "key_f49", "key_f50", "key_f51", "key_f52", "key_f53", "key_f54", "key_f55", "key_f56", + "key_f57", "key_f58", "key_f59", "key_f60", "key_f61", "key_f62", "key_f63", "clr_bol", + "clear_margins", "set_left_margin", "set_right_margin", "label_format", "set_clock", + "display_clock", "remove_clock", "create_window", "goto_window", "hangup", "dial_phone", + "quick_dial", "tone", "pulse", "flash_hook", "fixed_pause", "wait_tone", "user0", "user1", + "user2", "user3", "user4", "user5", "user6", "user7", "user8", "user9", "orig_pair", + "orig_colors", "initialize_color", "initialize_pair", "set_color_pair", "set_foreground", + "set_background", "change_char_pitch", "change_line_pitch", "change_res_horz", + "change_res_vert", "define_char", "enter_doublewide_mode", "enter_draft_quality", + "enter_italics_mode", "enter_leftward_mode", "enter_micro_mode", "enter_near_letter_quality", + "enter_normal_quality", "enter_shadow_mode", "enter_subscript_mode", "enter_superscript_mode", + "enter_upward_mode", "exit_doublewide_mode", "exit_italics_mode", "exit_leftward_mode", + "exit_micro_mode", "exit_shadow_mode", "exit_subscript_mode", "exit_superscript_mode", + "exit_upward_mode", "micro_column_address", "micro_down", "micro_left", "micro_right", + "micro_row_address", "micro_up", "order_of_pins", "parm_down_micro", "parm_left_micro", + "parm_right_micro", "parm_up_micro", "select_char_set", "set_bottom_margin", + "set_bottom_margin_parm", "set_left_margin_parm", "set_right_margin_parm", "set_top_margin", + "set_top_margin_parm", "start_bit_image", "start_char_set_def", "stop_bit_image", + "stop_char_set_def", "subscript_characters", "superscript_characters", "these_cause_cr", + "zero_motion", "char_set_names", "key_mouse", "mouse_info", "req_mouse_pos", "get_mouse", + "set_a_foreground", "set_a_background", "pkey_plab", "device_type", "code_set_init", + "set0_des_seq", "set1_des_seq", "set2_des_seq", "set3_des_seq", "set_lr_margin", + "set_tb_margin", "bit_image_repeat", "bit_image_newline", "bit_image_carriage_return", + "color_names", "define_bit_image_region", "end_bit_image_region", "set_color_band", + "set_page_length", "display_pc_char", "enter_pc_charset_mode", "exit_pc_charset_mode", + "enter_scancode_mode", "exit_scancode_mode", "pc_term_options", "scancode_escape", + "alt_scancode_esc", "enter_horizontal_hl_mode", "enter_left_hl_mode", "enter_low_hl_mode", + "enter_right_hl_mode", "enter_top_hl_mode", "enter_vertical_hl_mode", "set_a_attributes", + "set_pglen_inch", "termcap_init2", "termcap_reset", "linefeed_if_not_lf", "backspace_if_not_bs", + "other_non_function_keys", "arrow_key_map", "acs_ulcorner", "acs_llcorner", "acs_urcorner", + "acs_lrcorner", "acs_ltee", "acs_rtee", "acs_btee", "acs_ttee", "acs_hline", "acs_vline", + "acs_plus", "memory_lock", "memory_unlock", "box_chars_1"]; + +pub static stringnames: &'static[&'static str] = &[ "cbt", "_", "cr", "csr", "tbc", "clear", + "_", "_", "hpa", "cmdch", "cup", "cud1", "home", "civis", "cub1", "mrcup", "cnorm", "cuf1", + "ll", "cuu1", "cvvis", "dch1", "dl1", "dsl", "hd", "smacs", "blink", "bold", "smcup", "smdc", + "dim", "smir", "invis", 
"prot", "rev", "smso", "smul", "ech", "rmacs", "sgr0", "rmcup", "rmdc", + "rmir", "rmso", "rmul", "flash", "ff", "fsl", "is1", "is2", "is3", "if", "ich1", "il1", "ip", + "kbs", "ktbc", "kclr", "kctab", "_", "_", "kcud1", "_", "_", "_", "_", "_", "_", "_", "_", "_", + "_", "_", "_", "_", "_", "khome", "_", "_", "kcub1", "_", "knp", "kpp", "kcuf1", "_", "_", + "khts", "_", "rmkx", "smkx", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "rmm", "_", + "_", "pad", "dch", "dl", "cud", "ich", "indn", "il", "cub", "cuf", "rin", "cuu", "pfkey", + "pfloc", "pfx", "mc0", "mc4", "_", "rep", "rs1", "rs2", "rs3", "rf", "rc", "vpa", "sc", "ind", + "ri", "sgr", "_", "wind", "_", "tsl", "uc", "hu", "iprog", "_", "_", "_", "_", "_", "mc5p", + "rmp", "acsc", "pln", "kcbt", "smxon", "rmxon", "smam", "rmam", "xonc", "xoffc", "_", "smln", + "rmln", "_", "kcan", "kclo", "kcmd", "kcpy", "kcrt", "_", "kent", "kext", "kfnd", "khlp", + "kmrk", "kmsg", "kmov", "knxt", "kopn", "kopt", "kprv", "kprt", "krdo", "kref", "krfr", "krpl", + "krst", "kres", "ksav", "kspd", "kund", "kBEG", "kCAN", "kCMD", "kCPY", "kCRT", "_", "_", + "kslt", "kEND", "kEOL", "kEXT", "kFND", "kHLP", "kHOM", "_", "kLFT", "kMSG", "kMOV", "kNXT", + "kOPT", "kPRV", "kPRT", "kRDO", "kRPL", "kRIT", "kRES", "kSAV", "kSPD", "kUND", "rfi", "_", "_", + "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", + "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", + "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", "_", + "dclk", "rmclk", "cwin", "wingo", "_", "dial", "qdial", "_", "_", "hook", "pause", "wait", "_", + "_", "_", "_", "_", "_", "_", "_", "_", "_", "op", "oc", "initc", "initp", "scp", "setf", + "setb", "cpi", "lpi", "chr", "cvr", "defc", "swidm", "sdrfq", "sitm", "slm", "smicm", "snlq", + "snrmq", "sshm", "ssubm", "ssupm", "sum", "rwidm", "ritm", "rlm", "rmicm", "rshm", "rsubm", + "rsupm", "rum", "mhpa", "mcud1", "mcub1", "mcuf1", "mvpa", "mcuu1", "porder", "mcud", "mcub", + "mcuf", "mcuu", "scs", "smgb", "smgbp", "smglp", "smgrp", "smgt", "smgtp", "sbim", "scsd", + "rbim", "rcsd", "subcs", "supcs", "docr", "zerom", "csnm", "kmous", "minfo", "reqmp", "getm", + "setaf", "setab", "pfxl", "devt", "csin", "s0ds", "s1ds", "s2ds", "s3ds", "smglr", "smgtb", + "birep", "binel", "bicr", "colornm", "defbi", "endbi", "setcolor", "slines", "dispc", "smpch", + "rmpch", "smsc", "rmsc", "pctrm", "scesc", "scesa", "ehhlm", "elhlm", "elohlm", "erhlm", + "ethlm", "evhlm", "sgr1", "slength", "OTi2", "OTrs", "OTnl", "OTbs", "OTko", "OTma", "OTG2", + "OTG3", "OTG1", "OTG4", "OTGR", "OTGL", "OTGU", "OTGD", "OTGH", "OTGV", "OTGC", "meml", "memu", + "box1"]; + +fn read_le_u16(r: &mut io::Read) -> io::Result { + let mut b = [0; 2]; + let mut amt = 0; + while amt < b.len() { + match try!(r.read(&mut b[amt..])) { + 0 => return Err(io::Error::new(io::ErrorKind::Other, "end of file")), + n => amt += n, + } + } + Ok((b[0] as u16) | ((b[1] as u16) << 8)) +} + +fn read_byte(r: &mut io::Read) -> io::Result { + match r.bytes().next() { + Some(s) => s, + None => Err(io::Error::new(io::ErrorKind::Other, "end of file")) + } +} + +/// Parse a compiled terminfo entry, using long capability names if `longnames` +/// is true +pub fn parse(file: &mut io::Read, longnames: bool) -> Result { + macro_rules! 
try( ($e:expr) => ( + match $e { + Ok(e) => e, + Err(e) => return Err(format!("{}", e)) + } + ) ); + + let (bnames, snames, nnames) = if longnames { + (boolfnames, stringfnames, numfnames) + } else { + (boolnames, stringnames, numnames) + }; + + // Check magic number + let magic = try!(read_le_u16(file)); + if magic != 0x011A { + return Err(format!("invalid magic number: expected {:x}, found {:x}", + 0x011A, magic)); + } + + // According to the spec, these fields must be >= -1 where -1 means that the feature is not + // supported. Using 0 instead of -1 works because we skip sections with length 0. + macro_rules! read_nonneg { + () => {{ + match try!(read_le_u16(file)) as i16 { + n if n >= 0 => n as usize, + -1 => 0, + _ => return Err("incompatible file: length fields must be >= -1".to_string()), + } + }} + } + + let names_bytes = read_nonneg!(); + let bools_bytes = read_nonneg!(); + let numbers_count = read_nonneg!(); + let string_offsets_count = read_nonneg!(); + let string_table_bytes = read_nonneg!(); + + if names_bytes == 0 { + return Err("incompatible file: names field must be \ + at least 1 byte wide".to_string()); + } + + if bools_bytes > boolnames.len() { + return Err("incompatible file: more booleans than \ + expected".to_string()); + } + + if numbers_count > numnames.len() { + return Err("incompatible file: more numbers than \ + expected".to_string()); + } + + if string_offsets_count > stringnames.len() { + return Err("incompatible file: more string offsets than \ + expected".to_string()); + } + + // don't read NUL + let mut bytes = Vec::new(); + try!(file.take((names_bytes - 1) as u64).read_to_end(&mut bytes)); + let names_str = match String::from_utf8(bytes) { + Ok(s) => s, + Err(_) => return Err("input not utf-8".to_string()), + }; + + let term_names: Vec = names_str.split('|') + .map(|s| s.to_string()) + .collect(); + // consume NUL + if try!(read_byte(file)) != b'\0' { + return Err("incompatible file: missing null terminator \ + for names section".to_string()); + } + + let bools_map: HashMap = try!( + (0..bools_bytes).filter_map(|i| match read_byte(file) { + Err(e) => Some(Err(e)), + Ok(1) => Some(Ok((bnames[i].to_string(), true))), + Ok(_) => None + }).collect()); + + if (bools_bytes + names_bytes) % 2 == 1 { + try!(read_byte(file)); // compensate for padding + } + + let numbers_map: HashMap = try!( + (0..numbers_count).filter_map(|i| match read_le_u16(file) { + Ok(0xFFFF) => None, + Ok(n) => Some(Ok((nnames[i].to_string(), n))), + Err(e) => Some(Err(e)) + }).collect()); + + let string_map: HashMap> = if string_offsets_count > 0 { + let string_offsets: Vec = try!((0..string_offsets_count).map(|_| { + read_le_u16(file) + }).collect()); + + let mut string_table = Vec::new(); + try!(file.take(string_table_bytes as u64).read_to_end(&mut string_table)); + + try!(string_offsets.into_iter().enumerate().filter(|&(_, offset)| { + // non-entry + offset != 0xFFFF + }).map(|(i, offset)| { + let offset = offset as usize; + + let name = if snames[i] == "_" { + stringfnames[i] + } else { + snames[i] + }; + + if offset == 0xFFFE { + // undocumented: FFFE indicates cap@, which means the capability is not present + // unsure if the handling for this is correct + return Ok((name.to_string(), Vec::new())); + } + + // Find the offset of the NUL we want to go to + let nulpos = string_table[offset..string_table_bytes].iter().position(|&b| b == 0); + match nulpos { + Some(len) => Ok((name.to_string(), string_table[offset..offset + len].to_vec())), + None => Err("invalid file: missing NUL in 
string_table".to_string()), + } + }).collect()) + } else { + HashMap::new() + }; + + // And that's all there is to it + Ok(TermInfo { + names: term_names, + bools: bools_map, + numbers: numbers_map, + strings: string_map + }) +} + +/// Create a dummy TermInfo struct for msys terminals +pub fn msys_terminfo() -> TermInfo { + let mut strings = HashMap::new(); + strings.insert("sgr0".to_string(), b"\x1B[0m".to_vec()); + strings.insert("bold".to_string(), b"\x1B[1m".to_vec()); + strings.insert("setaf".to_string(), b"\x1B[3%p1%dm".to_vec()); + strings.insert("setab".to_string(), b"\x1B[4%p1%dm".to_vec()); + + let mut numbers = HashMap::new(); + numbers.insert("colors".to_string(), 8u16); + + TermInfo { + names: vec!("cygwin".to_string()), // msys is a fork of an older cygwin version + bools: HashMap::new(), + numbers: numbers, + strings: strings + } +} + +#[cfg(test)] +mod test { + + use super::{boolnames, boolfnames, numnames, numfnames, stringnames, stringfnames}; + + #[test] + fn test_veclens() { + assert_eq!(boolfnames.len(), boolnames.len()); + assert_eq!(numfnames.len(), numnames.len()); + assert_eq!(stringfnames.len(), stringnames.len()); + } + + #[test] + #[ignore(reason = "no ncurses on buildbots, needs a bundled terminfo file to test against")] + fn test_parse() { + // FIXME #6870: Distribute a compiled file in src/tests and test there + // parse(io::fs_reader(&p("/usr/share/terminfo/r/rxvt-256color")).unwrap(), false); + } +} diff --git a/src/terminfo/searcher.rs b/src/terminfo/searcher.rs new file mode 100644 index 0000000000000..207d5e0b281c7 --- /dev/null +++ b/src/terminfo/searcher.rs @@ -0,0 +1,79 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! ncurses-compatible database discovery +//! +//! Does not support hashed database, only filesystem! + +use std::env; +use std::fs; +use std::path::PathBuf; + +/// Return path to database entry for `term` +pub fn get_dbpath_for_term(term: &str) -> Option { + if term.len() == 0 { + return None; + } + + let mut dirs_to_search = Vec::new(); + let first_char = term.chars().next().unwrap(); + + // Find search directory + match env::var_os("TERMINFO") { + Some(dir) => dirs_to_search.push(PathBuf::from(dir)), + None => { + if let Some(mut homedir) = env::home_dir() { + // ncurses compatibility; + homedir.push(".terminfo"); + dirs_to_search.push(homedir) + } + match env::var("TERMINFO_DIRS") { + Ok(dirs) => for i in dirs.split(':') { + if i == "" { + dirs_to_search.push(PathBuf::from("/usr/share/terminfo")); + } else { + dirs_to_search.push(PathBuf::from(i)); + } + }, + // Found nothing in TERMINFO_DIRS, use the default paths: + // According to /etc/terminfo/README, after looking at + // ~/.terminfo, ncurses will search /etc/terminfo, then + // /lib/terminfo, and eventually /usr/share/terminfo. + Err(..) 
=> { + dirs_to_search.push(PathBuf::from("/etc/terminfo")); + dirs_to_search.push(PathBuf::from("/lib/terminfo")); + dirs_to_search.push(PathBuf::from("/usr/share/terminfo")); + } + } + } + }; + + // Look for the terminal in all of the search directories + for mut p in dirs_to_search { + if fs::metadata(&p).is_ok() { + p.push(&first_char.to_string()); + p.push(&term); + if fs::metadata(&p).is_ok() { + return Some(p); + } + p.pop(); + p.pop(); + + // on some installations the dir is named after the hex of the char + // (e.g. OS X) + p.push(&format!("{:x}", first_char as usize)); + p.push(term); + if fs::metadata(&p).is_ok() { + return Some(p); + } + } + } + None +} diff --git a/src/win.rs b/src/win.rs new file mode 100644 index 0000000000000..694d794ad4227 --- /dev/null +++ b/src/win.rs @@ -0,0 +1,274 @@ +// Copyright 2013-2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Windows console handling + +// FIXME (#13400): this is only a tiny fraction of the Windows console api + +extern crate kernel32; +extern crate winapi; + +use std::ffi::OsStr; +use std::io::prelude::*; +use std::io; +use std::os::windows::ffi::OsStrExt; +use std::ptr; + +use Attr; +use color; +use Terminal; + +/// A Terminal implementation which uses the Win32 Console API. +pub struct WinConsole { + buf: T, + def_foreground: color::Color, + def_background: color::Color, + foreground: color::Color, + background: color::Color, +} + +fn color_to_bits(color: color::Color) -> u16 { + // magic numbers from mingw-w64's wincon.h + + let bits = match color % 8 { + color::BLACK => 0, + color::BLUE => 0x1, + color::GREEN => 0x2, + color::RED => 0x4, + color::YELLOW => 0x2 | 0x4, + color::MAGENTA => 0x1 | 0x4, + color::CYAN => 0x1 | 0x2, + color::WHITE => 0x1 | 0x2 | 0x4, + _ => unreachable!() + }; + + if color >= 8 { + bits | 0x8 + } else { + bits + } +} + +fn bits_to_color(bits: u16) -> color::Color { + let color = match bits & 0x7 { + 0 => color::BLACK, + 0x1 => color::BLUE, + 0x2 => color::GREEN, + 0x4 => color::RED, + 0x6 => color::YELLOW, + 0x5 => color::MAGENTA, + 0x3 => color::CYAN, + 0x7 => color::WHITE, + _ => unreachable!() + }; + + color | (bits & 0x8) // copy the hi-intensity bit +} + +// Just get a handle to the current console buffer whatever it is +fn conout() -> io::Result { + let name: &OsStr = "CONOUT$\0".as_ref(); + let name: Vec = name.encode_wide().collect(); + let handle = unsafe { + kernel32::CreateFileW( + name.as_ptr(), + winapi::GENERIC_READ | winapi::GENERIC_WRITE, + winapi::FILE_SHARE_WRITE, + ptr::null_mut(), + winapi::OPEN_EXISTING, + 0, + ptr::null_mut(), + ) + }; + if handle == winapi::INVALID_HANDLE_VALUE { + Err(io::Error::last_os_error()) + } else { + Ok(handle) + } +} + +// This test will only pass if it is running in an actual console, probably +#[test] +fn test_conout() { + assert!(conout().is_ok()) +} + +impl WinConsole { + fn apply(&mut self) -> io::Result<()> { + let out = try!(conout()); + let _unused = self.buf.flush(); + let mut accum: winapi::WORD = 0; + accum |= color_to_bits(self.foreground); + accum |= color_to_bits(self.background) << 4; + unsafe { + kernel32::SetConsoleTextAttribute(out, accum); + } + Ok(()) + } + + /// Returns `None` whenever the terminal cannot be created for some + /// 
reason. + pub fn new(out: T) -> io::Result> { + let fg; + let bg; + let handle = try!(conout()); + unsafe { + let mut buffer_info = ::std::mem::uninitialized(); + if kernel32::GetConsoleScreenBufferInfo(handle, &mut buffer_info) != 0 { + fg = bits_to_color(buffer_info.wAttributes); + bg = bits_to_color(buffer_info.wAttributes >> 4); + } else { + return Err(io::Error::last_os_error()) + } + } + Ok(WinConsole { + buf: out, + def_foreground: fg, + def_background: bg, + foreground: fg, + background: bg, + }) + } +} + +impl Write for WinConsole { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.buf.write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + self.buf.flush() + } +} + +impl Terminal for WinConsole { + type Output = T; + + fn fg(&mut self, color: color::Color) -> io::Result { + self.foreground = color; + try!(self.apply()); + + Ok(true) + } + + fn bg(&mut self, color: color::Color) -> io::Result { + self.background = color; + try!(self.apply()); + + Ok(true) + } + + fn attr(&mut self, attr: Attr) -> io::Result { + match attr { + Attr::ForegroundColor(f) => { + self.foreground = f; + try!(self.apply()); + Ok(true) + }, + Attr::BackgroundColor(b) => { + self.background = b; + try!(self.apply()); + Ok(true) + }, + _ => Ok(false) + } + } + + fn supports_attr(&self, attr: Attr) -> bool { + // it claims support for underscore and reverse video, but I can't get + // it to do anything -cmr + match attr { + Attr::ForegroundColor(_) | Attr::BackgroundColor(_) => true, + _ => false + } + } + + fn reset(&mut self) -> io::Result { + self.foreground = self.def_foreground; + self.background = self.def_background; + try!(self.apply()); + + Ok(true) + } + + fn cursor_up(&mut self) -> io::Result { + let _unused = self.buf.flush(); + let handle = try!(conout()); + unsafe { + let mut buffer_info = ::std::mem::uninitialized(); + if kernel32::GetConsoleScreenBufferInfo(handle, &mut buffer_info) != 0 { + let (x, y) = (buffer_info.dwCursorPosition.X, buffer_info.dwCursorPosition.Y); + if y == 0 { + Ok(false) + } else { + let pos = winapi::COORD { X: x, Y: y - 1 }; + if kernel32::SetConsoleCursorPosition(handle, pos) != 0 { + Ok(true) + } else { + Err(io::Error::last_os_error()) + } + } + } else { + Err(io::Error::last_os_error()) + } + } + } + + fn delete_line(&mut self) -> io::Result { + let _unused = self.buf.flush(); + let handle = try!(conout()); + unsafe { + let mut buffer_info = ::std::mem::uninitialized(); + if kernel32::GetConsoleScreenBufferInfo(handle, &mut buffer_info) == 0 { + return Err(io::Error::last_os_error()) + } + let pos = buffer_info.dwCursorPosition; + let size = buffer_info.dwSize; + let num = (size.X - pos.X) as winapi::DWORD; + let mut written = 0; + if kernel32::FillConsoleOutputCharacterW(handle, 0, num, pos, &mut written) == 0 { + return Err(io::Error::last_os_error()) + } + if kernel32::FillConsoleOutputAttribute(handle, 0, num, pos, &mut written) == 0 { + return Err(io::Error::last_os_error()) + } + Ok(written != 0) + } + } + + fn carriage_return(&mut self) -> io::Result { + let _unused = self.buf.flush(); + let handle = try!(conout()); + unsafe { + let mut buffer_info = ::std::mem::uninitialized(); + if kernel32::GetConsoleScreenBufferInfo(handle, &mut buffer_info) != 0 { + let (x, y) = (buffer_info.dwCursorPosition.X, buffer_info.dwCursorPosition.Y); + if x == 0 { + Ok(false) + } else { + let pos = winapi::COORD { X: 0, Y: y }; + if kernel32::SetConsoleCursorPosition(handle, pos) != 0 { + Ok(true) + } else { + Err(io::Error::last_os_error()) + } + } + } else { + 
Err(io::Error::last_os_error()) + } + } + } + + fn get_ref<'a>(&'a self) -> &'a T { &self.buf } + + fn get_mut<'a>(&'a mut self) -> &'a mut T { &mut self.buf } + + fn into_inner(self) -> T where Self: Sized { self.buf } +} From 6a2603b6edfbf7d1621cdfb52fe3a0eed8f3a96d Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Fri, 5 Jun 2015 10:54:41 -0700 Subject: [PATCH 3/6] Squashed 'src/external/rustc_serialize/' content from commit e3115f3 git-subtree-dir: src/external/rustc_serialize git-subtree-split: e3115f387f98561a38caa147b776f01b4d4bb087 --- .gitignore | 2 + .travis.yml | 26 + Cargo.toml | 18 + LICENSE-APACHE | 201 ++ LICENSE-MIT | 25 + README.md | 24 + appveyor.yml | 11 + benches/base64.rs | 29 + benches/hex.rs | 28 + benches/json.rs | 84 + src/base64.rs | 406 ++++ src/collection_impls.rs | 186 ++ src/hex.rs | 209 +++ src/json.rs | 3906 +++++++++++++++++++++++++++++++++++++++ src/lib.rs | 55 + src/serialize.rs | 725 ++++++++ 16 files changed, 5935 insertions(+) create mode 100644 .gitignore create mode 100644 .travis.yml create mode 100644 Cargo.toml create mode 100644 LICENSE-APACHE create mode 100644 LICENSE-MIT create mode 100644 README.md create mode 100644 appveyor.yml create mode 100644 benches/base64.rs create mode 100644 benches/hex.rs create mode 100644 benches/json.rs create mode 100644 src/base64.rs create mode 100644 src/collection_impls.rs create mode 100644 src/hex.rs create mode 100644 src/json.rs create mode 100644 src/lib.rs create mode 100644 src/serialize.rs diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000000..4fffb2f89cbd8 --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +/target +/Cargo.lock diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000000000..eb446b23fe6bb --- /dev/null +++ b/.travis.yml @@ -0,0 +1,26 @@ +language: rust +rust: + - 1.0.0 + - beta + - nightly +sudo: false +script: + - cargo build --verbose + - cargo test --verbose + - | + [ $TRAVIS_RUST_VERSION != nightly ] || cargo bench --verbose + - cargo doc +after_success: | + [ $TRAVIS_BRANCH = master ] && + [ $TRAVIS_PULL_REQUEST = false ] && + [ $TRAVIS_RUST_VERSION = nightly ] && + echo '' > target/doc/index.html && + pip install ghp-import --user $USER && + $HOME/.local/bin/ghp-import -n target/doc && + git push -qf https://${TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git gh-pages +env: + global: + secure: f0CwX+CnvRbEqK3H6zhBQe4u0t14OQvyd6nUhq/oXkJ6LdtrSx+qQtLSmAU7L8p1IXyP8csxv37bTdEB7/U1c6bJcN2OXHrw9nD0NDvZEs1zSZvFQBm+YBwV7EaposPHCeqee3X9b00g7+bObywMYtEkk7yD2NiOY9SjMRcjTLQ= +notifications: + email: + on_success: never diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000000000..1f8f70c5761ea --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,18 @@ +[package] + +name = "rustc-serialize" +version = "0.3.14" +authors = ["The Rust Project Developers"] +license = "MIT/Apache-2.0" +readme = "README.md" +repository = "https://github.com/rust-lang/rustc-serialize" +homepage = "https://github.com/rust-lang/rustc-serialize" +documentation = "http://doc.rust-lang.org/rustc-serialize" +description = """ +Generic serialization/deserialization support corresponding to the +`derive(RustcEncodable, RustcDecodable)` mode in the compiler. Also includes +support for hex, base64, and json encoding and decoding. 
+""" + +[dev-dependencies] +rand = "0.3" diff --git a/LICENSE-APACHE b/LICENSE-APACHE new file mode 100644 index 0000000000000..16fe87b06e802 --- /dev/null +++ b/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/LICENSE-MIT b/LICENSE-MIT new file mode 100644 index 0000000000000..39d4bdb5acd31 --- /dev/null +++ b/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) 2014 The Rust Project Developers + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 0000000000000..e8db8b91e1d7b --- /dev/null +++ b/README.md @@ -0,0 +1,24 @@ +# rustc-serialize + +Serialization and deserialization support provided by the compiler in the form +of `derive(RustcEncodable, RustcDecodable)`. 
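For illustration, here is a minimal sketch of the round trip that the `derive(RustcEncodable, RustcDecodable)` workflow described above enables, using the crate's `json` module (added later in this patch). The `Point` type is hypothetical and exists only for the example:

```rust
// Minimal sketch: encode a derived type to JSON and decode it back.
// `Point` is a hypothetical example type, not part of rustc-serialize itself.
extern crate rustc_serialize;

use rustc_serialize::json;

#[derive(RustcEncodable, RustcDecodable, PartialEq, Debug)]
struct Point {
    x: i32,
    y: i32,
}

fn main() {
    let p = Point { x: 1, y: 2 };
    let encoded = json::encode(&p).unwrap();          // e.g. {"x":1,"y":2}
    let decoded: Point = json::decode(&encoded).unwrap();
    assert_eq!(p, decoded);
}
```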
+ +[![Linux Build Status](https://travis-ci.org/rust-lang/rustc-serialize.svg?branch=master)](https://travis-ci.org/rust-lang/rustc-serialize) +[![Windows Build Status](https://ci.appveyor.com/api/projects/status/ka194de75aapwpft?svg=true)](https://ci.appveyor.com/project/alexcrichton/rustc-serialize) + +[Documentation](http://doc.rust-lang.org/rustc-serialize) + +## Usage + +Add this to your `Cargo.toml`: + +```toml +[dependencies] +rustc-serialize = "0.3" +``` + +and this to your crate root: + +```rust +extern crate rustc_serialize; +``` diff --git a/appveyor.yml b/appveyor.yml new file mode 100644 index 0000000000000..f74c851ad7100 --- /dev/null +++ b/appveyor.yml @@ -0,0 +1,11 @@ +install: + - ps: Start-FileDownload 'https://static.rust-lang.org/dist/rust-nightly-i686-pc-windows-gnu.exe' + - rust-nightly-i686-pc-windows-gnu.exe /VERYSILENT /NORESTART /DIR="C:\Program Files (x86)\Rust" + - SET PATH=%PATH%;C:\Program Files (x86)\Rust\bin + - rustc -V + - cargo -V + +build: false + +test_script: + - cargo test --verbose diff --git a/benches/base64.rs b/benches/base64.rs new file mode 100644 index 0000000000000..629cb0a909b53 --- /dev/null +++ b/benches/base64.rs @@ -0,0 +1,29 @@ +#![feature(test)] + +extern crate test; +extern crate rustc_serialize; + +use rustc_serialize::base64::{FromBase64, ToBase64, STANDARD}; +use test::Bencher; + +#[bench] +fn bench_to_base64(b: &mut Bencher) { + let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \ + ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン"; + b.iter(|| { + s.as_bytes().to_base64(STANDARD); + }); + b.bytes = s.len() as u64; +} + +#[bench] +fn bench_from_base64(b: &mut Bencher) { + let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \ + ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン"; + let sb = s.as_bytes().to_base64(STANDARD); + b.iter(|| { + sb.from_base64().unwrap(); + }); + b.bytes = sb.len() as u64; +} + diff --git a/benches/hex.rs b/benches/hex.rs new file mode 100644 index 0000000000000..97a7735e81754 --- /dev/null +++ b/benches/hex.rs @@ -0,0 +1,28 @@ +#![feature(test)] + +extern crate test; +extern crate rustc_serialize; + +use test::Bencher; +use rustc_serialize::hex::{FromHex, ToHex}; + +#[bench] +fn bench_to_hex(b: &mut Bencher) { + let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \ + ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン"; + b.iter(|| { + s.as_bytes().to_hex(); + }); + b.bytes = s.len() as u64; +} + +#[bench] +fn bench_from_hex(b: &mut Bencher) { + let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \ + ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン"; + let sb = s.as_bytes().to_hex(); + b.iter(|| { + sb.from_hex().unwrap(); + }); + b.bytes = sb.len() as u64; +} diff --git a/benches/json.rs b/benches/json.rs new file mode 100644 index 0000000000000..20768e116a7c4 --- /dev/null +++ b/benches/json.rs @@ -0,0 +1,84 @@ +#![feature(test)] + +extern crate test; +extern crate rustc_serialize; + +use std::string; +use rustc_serialize::json::{Json, Parser}; +use test::Bencher; + +#[bench] +fn bench_streaming_small(b: &mut Bencher) { + b.iter( || { + let mut parser = Parser::new( + r#"{ + "a": 1.0, + "b": [ + true, + "foo\nbar", + { "c": {"d": null} } + ] + }"#.chars() + ); + loop { + match parser.next() { + None => return, + _ => {} + } + } + }); +} +#[bench] +fn bench_small(b: &mut Bencher) { + b.iter( || { + let _ = Json::from_str(r#"{ + "a": 1.0, + "b": [ + true, + "foo\nbar", + { "c": {"d": null} } + ] + }"#); + }); +} + +#[bench] +fn bench_decode_hex_escape(b: &mut Bencher) { + let mut src = "\"".to_string(); + for _ in 0..10 { + src.push_str("\\uF975\\uf9bc\\uF9A0\\uF9C4\\uF975\\uf9bc\\uF9A0\\uF9C4"); + } + src.push_str("\""); + b.iter( || { + let _ = 
Json::from_str(&src); + }); +} + +fn big_json() -> string::String { + let mut src = "[\n".to_string(); + for _ in 0..500 { + src.push_str(r#"{ "a": true, "b": null, "c":3.1415, "d": "Hello world", "e": \ + [1,2,3]},"#); + } + src.push_str("{}]"); + return src; +} + +#[bench] +fn bench_streaming_large(b: &mut Bencher) { + let src = big_json(); + b.iter( || { + let mut parser = Parser::new(src.chars()); + loop { + match parser.next() { + None => return, + _ => {} + } + } + }); +} +#[bench] +fn bench_large(b: &mut Bencher) { + let src = big_json(); + b.iter( || { let _ = Json::from_str(&src); }); +} diff --git a/src/base64.rs b/src/base64.rs new file mode 100644 index 0000000000000..5449d7bade19c --- /dev/null +++ b/src/base64.rs @@ -0,0 +1,406 @@ +// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. +// +// ignore-lexer-test FIXME #15679 + +//! Base64 binary-to-text encoding + +pub use self::FromBase64Error::*; +pub use self::CharacterSet::*; + +use std::fmt; +use std::error; + +/// Available encoding character sets +#[derive(Clone, Copy)] +pub enum CharacterSet { + /// The standard character set (uses `+` and `/`) + Standard, + /// The URL safe character set (uses `-` and `_`) + UrlSafe +} + +/// Available newline types +#[derive(Clone, Copy)] +pub enum Newline { + /// A linefeed (i.e. Unix-style newline) + LF, + /// A carriage return and a linefeed (i.e. Windows-style newline) + CRLF +} + +/// Contains configuration parameters for `to_base64`. +#[derive(Clone, Copy)] +pub struct Config { + /// Character set to use + pub char_set: CharacterSet, + /// Newline to use + pub newline: Newline, + /// True to pad output with `=` characters + pub pad: bool, + /// `Some(len)` to wrap lines at `len`, `None` to disable line wrapping + pub line_length: Option +} + +/// Configuration for RFC 4648 standard base64 encoding +pub static STANDARD: Config = + Config {char_set: Standard, newline: Newline::CRLF, pad: true, line_length: None}; + +/// Configuration for RFC 4648 base64url encoding +pub static URL_SAFE: Config = + Config {char_set: UrlSafe, newline: Newline::CRLF, pad: false, line_length: None}; + +/// Configuration for RFC 2045 MIME base64 encoding +pub static MIME: Config = + Config {char_set: Standard, newline: Newline::CRLF, pad: true, line_length: Some(76)}; + +static STANDARD_CHARS: &'static[u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ\ + abcdefghijklmnopqrstuvwxyz\ + 0123456789+/"; + +static URLSAFE_CHARS: &'static[u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ\ + abcdefghijklmnopqrstuvwxyz\ + 0123456789-_"; + +/// A trait for converting a value to base64 encoding. +pub trait ToBase64 { + /// Converts the value of `self` to a base64 value following the specified + /// format configuration, returning the owned string. + fn to_base64(&self, config: Config) -> String; +} + +impl ToBase64 for [u8] { + /// Turn a vector of `u8` bytes into a base64 string. 
+ /// + /// # Example + /// + /// ```rust + /// extern crate rustc_serialize; + /// use rustc_serialize::base64::{ToBase64, STANDARD}; + /// + /// fn main () { + /// let str = [52,32].to_base64(STANDARD); + /// println!("base 64 output: {:?}", str); + /// } + /// ``` + fn to_base64(&self, config: Config) -> String { + let bytes = match config.char_set { + Standard => STANDARD_CHARS, + UrlSafe => URLSAFE_CHARS + }; + + // In general, this Vec only needs (4/3) * self.len() memory, but + // addition is faster than multiplication and division. + let mut v = Vec::with_capacity(self.len() + self.len()); + let mut i = 0; + let mut cur_length = 0; + let len = self.len(); + let mod_len = len % 3; + let cond_len = len - mod_len; + let newline = match config.newline { + Newline::LF => "\n", + Newline::CRLF => "\r\n", + }; + while i < cond_len { + let (first, second, third) = (self[i], self[i + 1], self[i + 2]); + if let Some(line_length) = config.line_length { + if cur_length >= line_length { + v.extend(newline.bytes()); + cur_length = 0; + } + } + + let n = (first as u32) << 16 | + (second as u32) << 8 | + (third as u32); + + // This 24-bit number gets separated into four 6-bit numbers. + v.push(bytes[((n >> 18) & 63) as usize]); + v.push(bytes[((n >> 12) & 63) as usize]); + v.push(bytes[((n >> 6 ) & 63) as usize]); + v.push(bytes[(n & 63) as usize]); + + cur_length += 4; + i += 3; + } + + if mod_len != 0 { + if let Some(line_length) = config.line_length { + if cur_length >= line_length { + v.extend(newline.bytes()); + } + } + } + + // Heh, would be cool if we knew this was exhaustive + // (the dream of bounded integer types) + match mod_len { + 0 => (), + 1 => { + let n = (self[i] as u32) << 16; + v.push(bytes[((n >> 18) & 63) as usize]); + v.push(bytes[((n >> 12) & 63) as usize]); + if config.pad { + v.push(b'='); + v.push(b'='); + } + } + 2 => { + let n = (self[i] as u32) << 16 | + (self[i + 1] as u32) << 8; + v.push(bytes[((n >> 18) & 63) as usize]); + v.push(bytes[((n >> 12) & 63) as usize]); + v.push(bytes[((n >> 6 ) & 63) as usize]); + if config.pad { + v.push(b'='); + } + } + _ => panic!("Algebra is broken, please alert the math police") + } + + unsafe { String::from_utf8_unchecked(v) } + } +} + +/// A trait for converting from base64 encoded values. +pub trait FromBase64 { + /// Converts the value of `self`, interpreted as base64 encoded data, into + /// an owned vector of bytes, returning the vector. + fn from_base64(&self) -> Result, FromBase64Error>; +} + +/// Errors that can occur when decoding a base64 encoded string +#[derive(Clone, Copy)] +pub enum FromBase64Error { + /// The input contained a character not part of the base64 format + InvalidBase64Byte(u8, usize), + /// The input had an invalid length + InvalidBase64Length, +} + +impl fmt::Debug for FromBase64Error { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + InvalidBase64Byte(ch, idx) => + write!(f, "Invalid character '{}' at position {}", ch, idx), + InvalidBase64Length => write!(f, "Invalid length"), + } + } +} + +impl error::Error for FromBase64Error { + fn description(&self) -> &str { + match *self { + InvalidBase64Byte(_, _) => "invalid character", + InvalidBase64Length => "invalid length", + } + } +} + +impl fmt::Display for FromBase64Error { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Debug::fmt(&self, f) + } +} + +impl FromBase64 for str { + /// Convert any base64 encoded string (literal, `@`, `&`, or `~`) + /// to the byte values it encodes. 
+ /// + /// You can use the `String::from_utf8` function to turn a `Vec` into a + /// string with characters corresponding to those values. + /// + /// # Example + /// + /// This converts a string literal to base64 and back. + /// + /// ```rust + /// extern crate rustc_serialize; + /// use rustc_serialize::base64::{ToBase64, FromBase64, STANDARD}; + /// + /// fn main () { + /// let hello_str = b"Hello, World".to_base64(STANDARD); + /// println!("base64 output: {}", hello_str); + /// let res = hello_str.from_base64(); + /// if res.is_ok() { + /// let opt_bytes = String::from_utf8(res.unwrap()); + /// if opt_bytes.is_ok() { + /// println!("decoded from base64: {:?}", opt_bytes.unwrap()); + /// } + /// } + /// } + /// ``` + #[inline] + fn from_base64(&self) -> Result, FromBase64Error> { + self.as_bytes().from_base64() + } +} + +impl FromBase64 for [u8] { + fn from_base64(&self) -> Result, FromBase64Error> { + let mut r = Vec::with_capacity(self.len()); + let mut buf: u32 = 0; + let mut modulus = 0; + + let mut it = self.iter().enumerate(); + for (idx, &byte) in it.by_ref() { + let val = byte as u32; + + match byte { + b'A'...b'Z' => buf |= val - 0x41, + b'a'...b'z' => buf |= val - 0x47, + b'0'...b'9' => buf |= val + 0x04, + b'+' | b'-' => buf |= 0x3E, + b'/' | b'_' => buf |= 0x3F, + b'\r' | b'\n' => continue, + b'=' => break, + _ => return Err(InvalidBase64Byte(self[idx], idx)), + } + + buf <<= 6; + modulus += 1; + if modulus == 4 { + modulus = 0; + r.push((buf >> 22) as u8); + r.push((buf >> 14) as u8); + r.push((buf >> 6 ) as u8); + } + } + + for (idx, &byte) in it { + match byte { + b'=' | b'\r' | b'\n' => continue, + _ => return Err(InvalidBase64Byte(self[idx], idx)), + } + } + + match modulus { + 2 => { + r.push((buf >> 10) as u8); + } + 3 => { + r.push((buf >> 16) as u8); + r.push((buf >> 8 ) as u8); + } + 0 => (), + _ => return Err(InvalidBase64Length), + } + + Ok(r) + } +} + +#[cfg(test)] +mod tests { + use base64::{Config, Newline, FromBase64, ToBase64, STANDARD, URL_SAFE}; + + #[test] + fn test_to_base64_basic() { + assert_eq!("".as_bytes().to_base64(STANDARD), ""); + assert_eq!("f".as_bytes().to_base64(STANDARD), "Zg=="); + assert_eq!("fo".as_bytes().to_base64(STANDARD), "Zm8="); + assert_eq!("foo".as_bytes().to_base64(STANDARD), "Zm9v"); + assert_eq!("foob".as_bytes().to_base64(STANDARD), "Zm9vYg=="); + assert_eq!("fooba".as_bytes().to_base64(STANDARD), "Zm9vYmE="); + assert_eq!("foobar".as_bytes().to_base64(STANDARD), "Zm9vYmFy"); + } + + #[test] + fn test_to_base64_crlf_line_break() { + assert!(![08; 1000].to_base64(Config {line_length: None, ..STANDARD}) + .contains("\r\n")); + assert_eq!(b"foobar".to_base64(Config {line_length: Some(4), + ..STANDARD}), + "Zm9v\r\nYmFy"); + } + + #[test] + fn test_to_base64_lf_line_break() { + assert!(![08; 1000].to_base64(Config {line_length: None, + newline: Newline::LF, + ..STANDARD}) + .contains("\n")); + assert_eq!(b"foobar".to_base64(Config {line_length: Some(4), + newline: Newline::LF, + ..STANDARD}), + "Zm9v\nYmFy"); + } + + #[test] + fn test_to_base64_padding() { + assert_eq!("f".as_bytes().to_base64(Config {pad: false, ..STANDARD}), "Zg"); + assert_eq!("fo".as_bytes().to_base64(Config {pad: false, ..STANDARD}), "Zm8"); + } + + #[test] + fn test_to_base64_url_safe() { + assert_eq!([251, 255].to_base64(URL_SAFE), "-_8"); + assert_eq!([251, 255].to_base64(STANDARD), "+/8="); + } + + #[test] + fn test_from_base64_basic() { + assert_eq!("".from_base64().unwrap(), b""); + assert_eq!("Zg==".from_base64().unwrap(), b"f"); + 
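+        // Worked example for the pair above (illustrative, not from the
+        // original tests): "f" is 0x66 = 0b01100110, which splits into the
+        // 6-bit groups 011001 (25 -> 'Z') and 100000 (32 -> 'g'); the two
+        // missing groups are padded as "==" when encoding and skipped when
+        // decoding.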
assert_eq!("Zm8=".from_base64().unwrap(), b"fo"); + assert_eq!("Zm9v".from_base64().unwrap(), b"foo"); + assert_eq!("Zm9vYg==".from_base64().unwrap(), b"foob"); + assert_eq!("Zm9vYmE=".from_base64().unwrap(), b"fooba"); + assert_eq!("Zm9vYmFy".from_base64().unwrap(), b"foobar"); + } + + #[test] + fn test_from_base64_bytes() { + assert_eq!(b"Zm9vYmFy".from_base64().unwrap(), b"foobar"); + } + + #[test] + fn test_from_base64_newlines() { + assert_eq!("Zm9v\r\nYmFy".from_base64().unwrap(), + b"foobar"); + assert_eq!("Zm9vYg==\r\n".from_base64().unwrap(), + b"foob"); + assert_eq!("Zm9v\nYmFy".from_base64().unwrap(), + b"foobar"); + assert_eq!("Zm9vYg==\n".from_base64().unwrap(), + b"foob"); + } + + #[test] + fn test_from_base64_urlsafe() { + assert_eq!("-_8".from_base64().unwrap(), "+/8=".from_base64().unwrap()); + } + + #[test] + fn test_from_base64_invalid_char() { + assert!("Zm$=".from_base64().is_err()); + assert!("Zg==$".from_base64().is_err()); + } + + #[test] + fn test_from_base64_invalid_padding() { + assert!("Z===".from_base64().is_err()); + } + + #[test] + fn test_base64_random() { + use rand::{thread_rng, Rng}; + + for _ in 0..1000 { + let times = thread_rng().gen_range(1, 100); + let v = thread_rng().gen_iter::().take(times) + .collect::>(); + assert_eq!(v.to_base64(STANDARD) + .from_base64() + .unwrap(), + v); + } + } +} diff --git a/src/collection_impls.rs b/src/collection_impls.rs new file mode 100644 index 0000000000000..6ab4b7cef8a53 --- /dev/null +++ b/src/collection_impls.rs @@ -0,0 +1,186 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! 
Implementations of serialization for structures found in libcollections + +use std::hash::Hash; + +use {Decodable, Encodable, Decoder, Encoder}; +use std::collections::{LinkedList, VecDeque, BTreeMap, BTreeSet, HashMap, HashSet}; + +impl< + T: Encodable +> Encodable for LinkedList { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_seq(self.len(), |s| { + for (i, e) in self.iter().enumerate() { + try!(s.emit_seq_elt(i, |s| e.encode(s))); + } + Ok(()) + }) + } +} + +impl Decodable for LinkedList { + fn decode(d: &mut D) -> Result, D::Error> { + d.read_seq(|d, len| { + let mut list = LinkedList::new(); + for i in 0..len { + list.push_back(try!(d.read_seq_elt(i, |d| Decodable::decode(d)))); + } + Ok(list) + }) + } +} + +impl Encodable for VecDeque { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_seq(self.len(), |s| { + for (i, e) in self.iter().enumerate() { + try!(s.emit_seq_elt(i, |s| e.encode(s))); + } + Ok(()) + }) + } +} + +impl Decodable for VecDeque { + fn decode(d: &mut D) -> Result, D::Error> { + d.read_seq(|d, len| { + let mut deque: VecDeque = VecDeque::new(); + for i in 0..len { + deque.push_back(try!(d.read_seq_elt(i, |d| Decodable::decode(d)))); + } + Ok(deque) + }) + } +} + +impl< + K: Encodable + PartialEq + Ord, + V: Encodable + PartialEq +> Encodable for BTreeMap { + fn encode(&self, e: &mut S) -> Result<(), S::Error> { + e.emit_map(self.len(), |e| { + let mut i = 0; + for (key, val) in self.iter() { + try!(e.emit_map_elt_key(i, |e| key.encode(e))); + try!(e.emit_map_elt_val(i, |e| val.encode(e))); + i += 1; + } + Ok(()) + }) + } +} + +impl< + K: Decodable + PartialEq + Ord, + V: Decodable + PartialEq +> Decodable for BTreeMap { + fn decode(d: &mut D) -> Result, D::Error> { + d.read_map(|d, len| { + let mut map = BTreeMap::new(); + for i in 0..len { + let key = try!(d.read_map_elt_key(i, |d| Decodable::decode(d))); + let val = try!(d.read_map_elt_val(i, |d| Decodable::decode(d))); + map.insert(key, val); + } + Ok(map) + }) + } +} + +impl< + T: Encodable + PartialEq + Ord +> Encodable for BTreeSet { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_seq(self.len(), |s| { + let mut i = 0; + for e in self.iter() { + try!(s.emit_seq_elt(i, |s| e.encode(s))); + i += 1; + } + Ok(()) + }) + } +} + +impl< + T: Decodable + PartialEq + Ord +> Decodable for BTreeSet { + fn decode(d: &mut D) -> Result, D::Error> { + d.read_seq(|d, len| { + let mut set = BTreeSet::new(); + for i in 0..len { + set.insert(try!(d.read_seq_elt(i, |d| Decodable::decode(d)))); + } + Ok(set) + }) + } +} + +impl Encodable for HashMap + where K: Encodable + Hash + Eq, + V: Encodable, +{ + fn encode(&self, e: &mut E) -> Result<(), E::Error> { + e.emit_map(self.len(), |e| { + let mut i = 0; + for (key, val) in self.iter() { + try!(e.emit_map_elt_key(i, |e| key.encode(e))); + try!(e.emit_map_elt_val(i, |e| val.encode(e))); + i += 1; + } + Ok(()) + }) + } +} + +impl Decodable for HashMap + where K: Decodable + Hash + Eq, + V: Decodable, +{ + fn decode(d: &mut D) -> Result, D::Error> { + d.read_map(|d, len| { + let mut map = HashMap::with_capacity(len); + for i in 0..len { + let key = try!(d.read_map_elt_key(i, |d| Decodable::decode(d))); + let val = try!(d.read_map_elt_val(i, |d| Decodable::decode(d))); + map.insert(key, val); + } + Ok(map) + }) + } +} + +impl Encodable for HashSet where T: Encodable + Hash + Eq { + fn encode(&self, s: &mut E) -> Result<(), E::Error> { + s.emit_seq(self.len(), |s| { + let mut i = 0; + for e in self.iter() { + try!(s.emit_seq_elt(i, |s| 
e.encode(s))); + i += 1; + } + Ok(()) + }) + } +} + +impl Decodable for HashSet where T: Decodable + Hash + Eq, { + fn decode(d: &mut D) -> Result, D::Error> { + d.read_seq(|d, len| { + let mut set = HashSet::with_capacity(len); + for i in 0..len { + set.insert(try!(d.read_seq_elt(i, |d| Decodable::decode(d)))); + } + Ok(set) + }) + } +} diff --git a/src/hex.rs b/src/hex.rs new file mode 100644 index 0000000000000..08d25fdf2e36f --- /dev/null +++ b/src/hex.rs @@ -0,0 +1,209 @@ +// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. +// +// ignore-lexer-test FIXME #15679 + +//! Hex binary-to-text encoding + +pub use self::FromHexError::*; + +use std::fmt; +use std::error; + +/// A trait for converting a value to hexadecimal encoding +pub trait ToHex { + /// Converts the value of `self` to a hex value, returning the owned + /// string. + fn to_hex(&self) -> String; +} + +static CHARS: &'static[u8] = b"0123456789abcdef"; + +impl ToHex for [u8] { + /// Turn a vector of `u8` bytes into a hexadecimal string. + /// + /// # Example + /// + /// ```rust + /// extern crate rustc_serialize; + /// use rustc_serialize::hex::ToHex; + /// + /// fn main () { + /// let str = [52,32].to_hex(); + /// println!("{}", str); + /// } + /// ``` + fn to_hex(&self) -> String { + let mut v = Vec::with_capacity(self.len() * 2); + for &byte in self.iter() { + v.push(CHARS[(byte >> 4) as usize]); + v.push(CHARS[(byte & 0xf) as usize]); + } + + unsafe { + String::from_utf8_unchecked(v) + } + } +} + +/// A trait for converting hexadecimal encoded values +pub trait FromHex { + /// Converts the value of `self`, interpreted as hexadecimal encoded data, + /// into an owned vector of bytes, returning the vector. + fn from_hex(&self) -> Result, FromHexError>; +} + +/// Errors that can occur when decoding a hex encoded string +#[derive(Clone, Copy)] +pub enum FromHexError { + /// The input contained a character not part of the hex format + InvalidHexCharacter(char, usize), + /// The input had an invalid length + InvalidHexLength, +} + +impl fmt::Debug for FromHexError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + InvalidHexCharacter(ch, idx) => + write!(f, "Invalid character '{}' at position {}", ch, idx), + InvalidHexLength => write!(f, "Invalid input length"), + } + } +} + +impl error::Error for FromHexError { + fn description(&self) -> &str { + match *self { + InvalidHexCharacter(_, _) => "invalid character", + InvalidHexLength => "invalid length", + } + } +} + +impl fmt::Display for FromHexError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Debug::fmt(&self, f) + } +} + +impl FromHex for str { + /// Convert any hexadecimal encoded string (literal, `@`, `&`, or `~`) + /// to the byte values it encodes. + /// + /// You can use the `String::from_utf8` function to turn a + /// `Vec` into a string with characters corresponding to those values. + /// + /// # Example + /// + /// This converts a string literal to hexadecimal and back. 
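+    /// Whitespace (spaces, tabs, `\r` and `\n`) in the input is skipped,
+    /// and an odd number of hex digits is rejected with
+    /// `InvalidHexLength`.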
+ /// + /// ```rust + /// extern crate rustc_serialize; + /// use rustc_serialize::hex::{FromHex, ToHex}; + /// + /// fn main () { + /// let hello_str = "Hello, World".as_bytes().to_hex(); + /// println!("{}", hello_str); + /// let bytes = hello_str.from_hex().unwrap(); + /// println!("{:?}", bytes); + /// let result_str = String::from_utf8(bytes).unwrap(); + /// println!("{}", result_str); + /// } + /// ``` + fn from_hex(&self) -> Result, FromHexError> { + // This may be an overestimate if there is any whitespace + let mut b = Vec::with_capacity(self.len() / 2); + let mut modulus = 0; + let mut buf = 08; + + for (idx, byte) in self.bytes().enumerate() { + buf <<= 4; + + match byte { + b'A'...b'F' => buf |= byte - b'A' + 10, + b'a'...b'f' => buf |= byte - b'a' + 10, + b'0'...b'9' => buf |= byte - b'0', + b' '|b'\r'|b'\n'|b'\t' => { + buf >>= 4; + continue + } + _ => { + let ch = self[idx..].chars().next().unwrap(); + return Err(InvalidHexCharacter(ch, idx)) + } + } + + modulus += 1; + if modulus == 2 { + modulus = 0; + b.push(buf); + } + } + + match modulus { + 0 => Ok(b.into_iter().collect()), + _ => Err(InvalidHexLength), + } + } +} + +#[cfg(test)] +mod tests { + use hex::{FromHex, ToHex}; + + #[test] + pub fn test_to_hex() { + assert_eq!("foobar".as_bytes().to_hex(), "666f6f626172"); + } + + #[test] + pub fn test_from_hex_okay() { + assert_eq!("666f6f626172".from_hex().unwrap(), + b"foobar"); + assert_eq!("666F6F626172".from_hex().unwrap(), + b"foobar"); + } + + #[test] + pub fn test_from_hex_odd_len() { + assert!("666".from_hex().is_err()); + assert!("66 6".from_hex().is_err()); + } + + #[test] + pub fn test_from_hex_invalid_char() { + assert!("66y6".from_hex().is_err()); + } + + #[test] + pub fn test_from_hex_ignores_whitespace() { + assert_eq!("666f 6f6\r\n26172 ".from_hex().unwrap(), + b"foobar"); + } + + #[test] + pub fn test_to_hex_all_bytes() { + for i in 0..256 { + assert_eq!([i as u8].to_hex(), format!("{:02x}", i)); + } + } + + #[test] + pub fn test_from_hex_all_bytes() { + for i in 0..256 { + let ii: &[u8] = &[i as u8]; + assert_eq!(format!("{:02x}", i).from_hex().unwrap(), + ii); + assert_eq!(format!("{:02X}", i).from_hex().unwrap(), + ii); + } + } +} diff --git a/src/json.rs b/src/json.rs new file mode 100644 index 0000000000000..1fd4c430cda21 --- /dev/null +++ b/src/json.rs @@ -0,0 +1,3906 @@ +// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Rust JSON serialization library +// Copyright (c) 2011 Google Inc. + +//! JSON parsing and serialization +//! +//! # What is JSON? +//! +//! JSON (JavaScript Object Notation) is a way to write data in Javascript. +//! Like XML, it allows to encode structured data in a text format that can be +//! easily read by humans Its simple syntax and native compatibility with +//! JavaScript have made it a widely used format. +//! +//! Data types that can be encoded are JavaScript types (see the `Json` enum +//! for more details): +//! +//! * `I64`: equivalent to rust's `i64` +//! * `U64`: equivalent to rust's `u64` +//! * `F64`: equivalent to rust's `f64` +//! * `Boolean`: equivalent to rust's `bool` +//! * `String`: equivalent to rust's `String` +//! * `Array`: equivalent to rust's `Vec`, but also allowing objects of +//! 
different types in the +//! same array +//! * `Object`: equivalent to rust's `BTreeMap` +//! * `Null` +//! +//! An object is a series of string keys mapping to values, in `"key": value` +//! format. Arrays are enclosed in square brackets ([ ... ]) and objects in +//! curly brackets ({ ... }). A simple JSON document encoding a person, +//! their age, address and phone numbers could look like +//! +//! ```ignore +//! { +//! "FirstName": "John", +//! "LastName": "Doe", +//! "Age": 43, +//! "Address": { +//! "Street": "Downing Street 10", +//! "City": "London", +//! "Country": "Great Britain" +//! }, +//! "PhoneNumbers": [ +//! "+44 1234567", +//! "+44 2345678" +//! ] +//! } +//! ``` +//! +//! # Rust Type-based Encoding and Decoding +//! +//! Rust provides a mechanism for low boilerplate encoding & decoding of values +//! to and from JSON via the serialization API. To be able to encode a piece +//! of data, it must implement the `rustc_serialize::Encodable` trait. To be +//! able to decode a piece of data, it must implement the +//! `rustc_serialize::Decodable` trait. The Rust compiler provides an +//! annotation to automatically generate the code for these traits: +//! `#[derive(RustcDecodable, RustcEncodable)]` +//! +//! The JSON API provides an enum `json::Json` and a trait `ToJson` to encode +//! objects. The `ToJson` trait provides a `to_json` method to convert an +//! object into a `json::Json` value. A `json::Json` value can be encoded as a +//! string or buffer using the functions described above. You can also use the +//! `json::Encoder` object, which implements the `Encoder` trait. +//! +//! When using `ToJson` the `Encodable` trait implementation is not +//! mandatory. +//! +//! # Examples of use +//! +//! ## Using Autoserialization +//! +//! Create a struct called `TestStruct` and serialize and deserialize it to and +//! from JSON using the serialization API, using the derived serialization code. +//! +//! ```rust +//! extern crate rustc_serialize; +//! use rustc_serialize::json; +//! +//! // Automatically generate `RustcDecodable` and `RustcEncodable` trait +//! // implementations +//! #[derive(RustcDecodable, RustcEncodable)] +//! pub struct TestStruct { +//! data_int: u8, +//! data_str: String, +//! data_vector: Vec, +//! } +//! +//! fn main() { +//! let object = TestStruct { +//! data_int: 1, +//! data_str: "homura".to_string(), +//! data_vector: vec![2,3,4,5], +//! }; +//! +//! // Serialize using `json::encode` +//! let encoded = json::encode(&object).unwrap(); +//! +//! // Deserialize using `json::decode` +//! let decoded: TestStruct = json::decode(&encoded).unwrap(); +//! } +//! ``` +//! +//! ## Using the `ToJson` trait +//! +//! The examples above use the `ToJson` trait to generate the JSON string, +//! which is required for custom mappings. +//! +//! ### Simple example of `ToJson` usage +//! +//! ```rust +//! extern crate rustc_serialize; +//! use rustc_serialize::json::{self, ToJson, Json}; +//! +//! // A custom data structure +//! struct ComplexNum { +//! a: f64, +//! b: f64, +//! } +//! +//! // JSON value representation +//! impl ToJson for ComplexNum { +//! fn to_json(&self) -> Json { +//! Json::String(format!("{}+{}i", self.a, self.b)) +//! } +//! } +//! +//! // Only generate `RustcEncodable` trait implementation +//! #[derive(RustcEncodable)] +//! pub struct ComplexNumRecord { +//! uid: u8, +//! dsc: String, +//! val: Json, +//! } +//! +//! fn main() { +//! let num = ComplexNum { a: 0.0001, b: 12.539 }; +//! let data: String = json::encode(&ComplexNumRecord{ +//! 
uid: 1, +//! dsc: "test".to_string(), +//! val: num.to_json(), +//! }).unwrap(); +//! println!("data: {}", data); +//! // data: {"uid":1,"dsc":"test","val":"0.0001+12.539i"}; +//! } +//! ``` +//! +//! ### Verbose example of `ToJson` usage +//! +//! ```rust +//! extern crate rustc_serialize; +//! use std::collections::BTreeMap; +//! use rustc_serialize::json::{self, Json, ToJson}; +//! +//! // Only generate `Decodable` trait implementation +//! #[derive(RustcDecodable)] +//! pub struct TestStruct { +//! data_int: u8, +//! data_str: String, +//! data_vector: Vec, +//! } +//! +//! // Specify encoding method manually +//! impl ToJson for TestStruct { +//! fn to_json(&self) -> Json { +//! let mut d = BTreeMap::new(); +//! // All standard types implement `to_json()`, so use it +//! d.insert("data_int".to_string(), self.data_int.to_json()); +//! d.insert("data_str".to_string(), self.data_str.to_json()); +//! d.insert("data_vector".to_string(), self.data_vector.to_json()); +//! Json::Object(d) +//! } +//! } +//! +//! fn main() { +//! // Serialize using `ToJson` +//! let input_data = TestStruct { +//! data_int: 1, +//! data_str: "madoka".to_string(), +//! data_vector: vec![2,3,4,5], +//! }; +//! let json_obj: Json = input_data.to_json(); +//! let json_str: String = json_obj.to_string(); +//! +//! // Deserialize like before +//! let decoded: TestStruct = json::decode(&json_str).unwrap(); +//! } +//! ``` +//! +//! ## Parsing a `str` to `Json` and reading the result +//! +//! ```rust +//! extern crate rustc_serialize; +//! use rustc_serialize::json::Json; +//! +//! fn main() { +//! let data = Json::from_str("{\"foo\": 13, \"bar\": \"baz\"}").unwrap(); +//! println!("data: {}", data); +//! // data: {"bar":"baz","foo":13} +//! println!("object? {}", data.is_object()); +//! // object? true +//! +//! let obj = data.as_object().unwrap(); +//! let foo = obj.get("foo").unwrap(); +//! +//! println!("array? {:?}", foo.as_array()); +//! // array? None +//! println!("u64? {:?}", foo.as_u64()); +//! // u64? Some(13u64) +//! +//! for (key, value) in obj.iter() { +//! println!("{}: {}", key, match *value { +//! Json::U64(v) => format!("{} (u64)", v), +//! Json::String(ref v) => format!("{} (string)", v), +//! _ => format!("other") +//! }); +//! } +//! // bar: baz (string) +//! // foo: 13 (u64) +//! } +//! ``` + +use self::JsonEvent::*; +use self::ErrorCode::*; +use self::ParserError::*; +use self::DecoderError::*; +use self::ParserState::*; +use self::InternalStackElement::*; + +use std::collections::{HashMap, BTreeMap}; +use std::error::Error as StdError; +use std::i64; +use std::io::prelude::*; +use std::mem::swap; +use std::ops::Index; +use std::str::FromStr; +use std::string; +use std::{char, f64, fmt, io, str}; + +use Encodable; + +/// Represents a json value +#[derive(Clone, PartialEq, PartialOrd, Debug)] +pub enum Json { + I64(i64), + U64(u64), + F64(f64), + String(string::String), + Boolean(bool), + Array(self::Array), + Object(self::Object), + Null, +} + +pub type Array = Vec; +pub type Object = BTreeMap; + +pub struct PrettyJson<'a> { inner: &'a Json } + +pub struct AsJson<'a, T: 'a> { inner: &'a T } +pub struct AsPrettyJson<'a, T: 'a> { inner: &'a T, indent: Option } + +/// The errors that can arise while parsing a JSON stream. 
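+///
+/// A minimal sketch of how these codes surface (illustrative only; it
+/// assumes the crate is importable as `rustc_serialize`): a syntax error
+/// is reported as a `SyntaxError` carrying the `ErrorCode` plus the line
+/// and column at which parsing stopped.
+///
+/// ```rust
+/// extern crate rustc_serialize;
+/// use rustc_serialize::json::Json;
+///
+/// fn main() {
+///     // A trailing comma is not valid JSON, so parsing fails.
+///     assert!(Json::from_str("[1, 2,]").is_err());
+/// }
+/// ```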
+#[derive(Clone, Copy, PartialEq)] +pub enum ErrorCode { + InvalidSyntax, + InvalidNumber, + EOFWhileParsingObject, + EOFWhileParsingArray, + EOFWhileParsingValue, + EOFWhileParsingString, + KeyMustBeAString, + ExpectedColon, + TrailingCharacters, + TrailingComma, + InvalidEscape, + InvalidUnicodeCodePoint, + LoneLeadingSurrogateInHexEscape, + UnexpectedEndOfHexEscape, + UnrecognizedHex, + NotFourDigit, + ControlCharacterInString, + NotUtf8, +} + +#[derive(Debug)] +pub enum ParserError { + /// msg, line, col + SyntaxError(ErrorCode, usize, usize), + IoError(io::Error), +} + +impl PartialEq for ParserError { + fn eq(&self, other: &ParserError) -> bool { + match (self, other) { + (&SyntaxError(msg0, line0, col0), &SyntaxError(msg1, line1, col1)) => + msg0 == msg1 && line0 == line1 && col0 == col1, + (&IoError(_), _) => false, + (_, &IoError(_)) => false, + } + } +} + +// Builder and Parser have the same errors. +pub type BuilderError = ParserError; + +#[derive(PartialEq, Debug)] +pub enum DecoderError { + ParseError(ParserError), + ExpectedError(string::String, string::String), + MissingFieldError(string::String), + UnknownVariantError(string::String), + ApplicationError(string::String), + EOF, +} + +#[derive(Copy, Debug)] +pub enum EncoderError { + FmtError(fmt::Error), + BadHashmapKey, +} + +impl Clone for EncoderError { + fn clone(&self) -> Self { *self } +} + +/// Returns a readable error string for a given error code. +pub fn error_str(error: ErrorCode) -> &'static str { + match error { + InvalidSyntax => "invalid syntax", + InvalidNumber => "invalid number", + EOFWhileParsingObject => "EOF While parsing object", + EOFWhileParsingArray => "EOF While parsing array", + EOFWhileParsingValue => "EOF While parsing value", + EOFWhileParsingString => "EOF While parsing string", + KeyMustBeAString => "key must be a string", + ExpectedColon => "expected `:`", + TrailingCharacters => "trailing characters", + TrailingComma => "trailing comma", + InvalidEscape => "invalid escape", + UnrecognizedHex => "invalid \\u{ esc}ape (unrecognized hex)", + NotFourDigit => "invalid \\u{ esc}ape (not four digits)", + ControlCharacterInString => "unescaped control character in string", + NotUtf8 => "contents not utf-8", + InvalidUnicodeCodePoint => "invalid Unicode code point", + LoneLeadingSurrogateInHexEscape => "lone leading surrogate in hex escape", + UnexpectedEndOfHexEscape => "unexpected end of hex escape", + } +} + +/// Shortcut function to decode a JSON `&str` into an object +pub fn decode(s: &str) -> DecodeResult { + let json = match Json::from_str(s) { + Ok(x) => x, + Err(e) => return Err(ParseError(e)) + }; + + let mut decoder = Decoder::new(json); + ::Decodable::decode(&mut decoder) +} + +/// Shortcut function to encode a `T` into a JSON `String` +pub fn encode(object: &T) -> EncodeResult { + let mut s = String::new(); + { + let mut encoder = Encoder::new(&mut s); + try!(object.encode(&mut encoder)); + } + Ok(s) +} + +impl fmt::Debug for ErrorCode { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + error_str(*self).fmt(f) + } +} + +impl StdError for DecoderError { + fn description(&self) -> &str { "decoder error" } + fn cause(&self) -> Option<&StdError> { + match *self { + DecoderError::ParseError(ref e) => Some(e), + _ => None, + } + } +} + +impl fmt::Display for DecoderError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Debug::fmt(&self, f) + } +} + +impl From for DecoderError { + fn from(err: ParserError) -> DecoderError { + ParseError(From::from(err)) + } +} + +impl 
StdError for ParserError { + fn description(&self) -> &str { "failed to parse json" } +} + +impl fmt::Display for ParserError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Debug::fmt(&self, f) + } +} + +impl From for ParserError { + fn from(err: io::Error) -> ParserError { + IoError(err) + } +} + +impl StdError for EncoderError { + fn description(&self) -> &str { "encoder error" } +} + +impl fmt::Display for EncoderError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Debug::fmt(&self, f) + } +} + +impl From for EncoderError { + fn from(err: fmt::Error) -> EncoderError { EncoderError::FmtError(err) } +} + +pub type EncodeResult = Result; +pub type DecodeResult = Result; + +fn escape_str(wr: &mut fmt::Write, v: &str) -> EncodeResult<()> { + try!(wr.write_str("\"")); + + let mut start = 0; + + for (i, byte) in v.bytes().enumerate() { + let escaped = match byte { + b'"' => "\\\"", + b'\\' => "\\\\", + b'\x00' => "\\u0000", + b'\x01' => "\\u0001", + b'\x02' => "\\u0002", + b'\x03' => "\\u0003", + b'\x04' => "\\u0004", + b'\x05' => "\\u0005", + b'\x06' => "\\u0006", + b'\x07' => "\\u0007", + b'\x08' => "\\b", + b'\t' => "\\t", + b'\n' => "\\n", + b'\x0b' => "\\u000b", + b'\x0c' => "\\f", + b'\r' => "\\r", + b'\x0e' => "\\u000e", + b'\x0f' => "\\u000f", + b'\x10' => "\\u0010", + b'\x11' => "\\u0011", + b'\x12' => "\\u0012", + b'\x13' => "\\u0013", + b'\x14' => "\\u0014", + b'\x15' => "\\u0015", + b'\x16' => "\\u0016", + b'\x17' => "\\u0017", + b'\x18' => "\\u0018", + b'\x19' => "\\u0019", + b'\x1a' => "\\u001a", + b'\x1b' => "\\u001b", + b'\x1c' => "\\u001c", + b'\x1d' => "\\u001d", + b'\x1e' => "\\u001e", + b'\x1f' => "\\u001f", + b'\x7f' => "\\u007f", + _ => { continue; } + }; + + if start < i { + try!(wr.write_str(&v[start..i])); + } + + try!(wr.write_str(escaped)); + + start = i + 1; + } + + if start != v.len() { + try!(wr.write_str(&v[start..])); + } + + try!(wr.write_str("\"")); + Ok(()) +} + +fn escape_char(writer: &mut fmt::Write, v: char) -> EncodeResult<()> { + let mut buf = [0; 4]; + let _ = write!(&mut &mut buf[..], "{}", v); + let buf = unsafe { str::from_utf8_unchecked(&buf[..v.len_utf8()]) }; + escape_str(writer, buf) +} + +fn spaces(wr: &mut fmt::Write, n: u32) -> EncodeResult<()> { + let mut n = n as usize; + const BUF: &'static str = " "; + + while n >= BUF.len() { + try!(wr.write_str(BUF)); + n -= BUF.len(); + } + + if n > 0 { + try!(wr.write_str(&BUF[..n])); + } + Ok(()) +} + +fn fmt_number_or_null(v: f64) -> string::String { + use std::num::FpCategory::{Nan, Infinite}; + + match v.classify() { + Nan | Infinite => "null".to_string(), + _ => { + let s = v.to_string(); + if s.contains(".") {s} else {s + ".0"} + } + } +} + +macro_rules! emit_enquoted_if_mapkey { + ($enc:ident,$e:expr) => { + if $enc.is_emitting_map_key { + try!(write!($enc.writer, "\"{}\"", $e)); + Ok(()) + } else { + try!(write!($enc.writer, "{}", $e)); + Ok(()) + } + } +} + +enum EncodingFormat { + Compact, + Pretty { + curr_indent: u32, + indent: u32 + } +} + +/// A structure for implementing serialization to JSON. 
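+///
+/// A compact-mode usage sketch (illustrative; it assumes the crate is
+/// importable as `rustc_serialize` and relies on the `Encodable` impls
+/// for `Vec` and the integer types):
+///
+/// ```rust
+/// extern crate rustc_serialize;
+/// use rustc_serialize::json::Encoder;
+/// use rustc_serialize::Encodable;
+///
+/// fn main() {
+///     let mut out = String::new();
+///     {
+///         let mut encoder = Encoder::new(&mut out);
+///         let data: Vec<u32> = vec![1, 2, 3];
+///         data.encode(&mut encoder).unwrap();
+///     }
+///     assert_eq!(out, "[1,2,3]");
+/// }
+/// ```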
+pub struct Encoder<'a> { + writer: &'a mut (fmt::Write+'a), + format : EncodingFormat, + is_emitting_map_key: bool, +} + +impl<'a> Encoder<'a> { + /// Creates a new encoder whose output will be written in human-readable + /// JSON to the specified writer + pub fn new_pretty(writer: &'a mut fmt::Write) -> Encoder<'a> { + Encoder { + writer: writer, + format: EncodingFormat::Pretty { + curr_indent: 0, + indent: 2, + }, + is_emitting_map_key: false, + } + } + + /// Creates a new encoder whose output will be written in compact + /// JSON to the specified writer + pub fn new(writer: &'a mut fmt::Write) -> Encoder<'a> { + Encoder { + writer: writer, + format: EncodingFormat::Compact, + is_emitting_map_key: false, + } + } + + /// Set the number of spaces to indent for each level. + /// This is safe to set during encoding. + pub fn set_indent(&mut self, new_indent: u32) -> Result<(), ()> { + if let EncodingFormat::Pretty{ref mut curr_indent, ref mut indent} = self.format { + // self.indent very well could be 0 so we need to use checked division. + let level = curr_indent.checked_div(*indent).unwrap_or(0); + *indent = new_indent; + *curr_indent = level * *indent; + Ok(()) + } else { + Err(()) + } + } +} + +impl<'a> ::Encoder for Encoder<'a> { + type Error = EncoderError; + + fn emit_nil(&mut self) -> EncodeResult<()> { + if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } + try!(write!(self.writer, "null")); + Ok(()) + } + + fn emit_usize(&mut self, v: usize) -> EncodeResult<()> { emit_enquoted_if_mapkey!(self, v) } + fn emit_u64(&mut self, v: u64) -> EncodeResult<()> { emit_enquoted_if_mapkey!(self, v) } + fn emit_u32(&mut self, v: u32) -> EncodeResult<()> { emit_enquoted_if_mapkey!(self, v) } + fn emit_u16(&mut self, v: u16) -> EncodeResult<()> { emit_enquoted_if_mapkey!(self, v) } + fn emit_u8(&mut self, v: u8) -> EncodeResult<()> { emit_enquoted_if_mapkey!(self, v) } + + fn emit_isize(&mut self, v: isize) -> EncodeResult<()> { emit_enquoted_if_mapkey!(self, v) } + fn emit_i64(&mut self, v: i64) -> EncodeResult<()> { emit_enquoted_if_mapkey!(self, v) } + fn emit_i32(&mut self, v: i32) -> EncodeResult<()> { emit_enquoted_if_mapkey!(self, v) } + fn emit_i16(&mut self, v: i16) -> EncodeResult<()> { emit_enquoted_if_mapkey!(self, v) } + fn emit_i8(&mut self, v: i8) -> EncodeResult<()> { emit_enquoted_if_mapkey!(self, v) } + + fn emit_bool(&mut self, v: bool) -> EncodeResult<()> { + if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } + if v { + try!(write!(self.writer, "true")); + } else { + try!(write!(self.writer, "false")); + } + Ok(()) + } + + fn emit_f64(&mut self, v: f64) -> EncodeResult<()> { + emit_enquoted_if_mapkey!(self, fmt_number_or_null(v)) + } + fn emit_f32(&mut self, v: f32) -> EncodeResult<()> { + self.emit_f64(v as f64) + } + + fn emit_char(&mut self, v: char) -> EncodeResult<()> { + escape_char(self.writer, v) + } + fn emit_str(&mut self, v: &str) -> EncodeResult<()> { + escape_str(self.writer, v) + } + + fn emit_enum(&mut self, _name: &str, f: F) -> EncodeResult<()> where + F: FnOnce(&mut Encoder<'a>) -> EncodeResult<()>, + { + f(self) + } + + fn emit_enum_variant(&mut self, + name: &str, + _id: usize, + cnt: usize, + f: F) + -> EncodeResult<()> where + F: FnOnce(&mut Encoder<'a>) -> EncodeResult<()>, + { + // enums are encoded as strings or objects + // Bunny => "Bunny" + // Kangaroo(34,"William") => {"variant": "Kangaroo", "fields": [34,"William"]} + if cnt == 0 { + escape_str(self.writer, name) + } else { + if self.is_emitting_map_key 
{ return Err(EncoderError::BadHashmapKey); } + if let EncodingFormat::Pretty{ref mut curr_indent, indent} = self.format { + try!(write!(self.writer, "{{\n")); + *curr_indent += indent; + try!(spaces(self.writer, *curr_indent)); + try!(write!(self.writer, "\"variant\": ")); + try!(escape_str(self.writer, name)); + try!(write!(self.writer, ",\n")); + try!(spaces(self.writer, *curr_indent)); + try!(write!(self.writer, "\"fields\": [\n")); + *curr_indent += indent; + } else { + try!(write!(self.writer, "{{\"variant\":")); + try!(escape_str(self.writer, name)); + try!(write!(self.writer, ",\"fields\":[")); + } + try!(f(self)); + if let EncodingFormat::Pretty{ref mut curr_indent, indent} = self.format { + *curr_indent -= indent; + try!(write!(self.writer, "\n")); + try!(spaces(self.writer, *curr_indent)); + *curr_indent -= indent; + try!(write!(self.writer, "]\n")); + try!(spaces(self.writer, *curr_indent)); + try!(write!(self.writer, "}}")); + } else { + try!(write!(self.writer, "]}}")); + } + Ok(()) + } + } + + fn emit_enum_variant_arg(&mut self, idx: usize, f: F) -> EncodeResult<()> where + F: FnOnce(&mut Encoder<'a>) -> EncodeResult<()>, + { + if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } + if idx != 0 { + try!(write!(self.writer, ",")); + if let EncodingFormat::Pretty{..} = self.format { + try!(write!(self.writer, "\n")); + } + } + if let EncodingFormat::Pretty{curr_indent, ..} = self.format { + try!(spaces(self.writer, curr_indent)); + } + f(self) + } + + fn emit_enum_struct_variant(&mut self, + name: &str, + id: usize, + cnt: usize, + f: F) -> EncodeResult<()> where + F: FnOnce(&mut Encoder<'a>) -> EncodeResult<()>, + { + if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } + self.emit_enum_variant(name, id, cnt, f) + } + + fn emit_enum_struct_variant_field(&mut self, + _: &str, + idx: usize, + f: F) -> EncodeResult<()> where + F: FnOnce(&mut Encoder<'a>) -> EncodeResult<()>, + { + if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } + self.emit_enum_variant_arg(idx, f) + } + + + fn emit_struct(&mut self, _: &str, len: usize, f: F) -> EncodeResult<()> where + F: FnOnce(&mut Encoder<'a>) -> EncodeResult<()>, + { + if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } + if len == 0 { + try!(write!(self.writer, "{{}}")); + } else { + try!(write!(self.writer, "{{")); + if let EncodingFormat::Pretty{ref mut curr_indent, indent} = self.format { + *curr_indent += indent; + } + try!(f(self)); + if let EncodingFormat::Pretty{ref mut curr_indent, indent} = self.format { + *curr_indent -= indent; + try!(write!(self.writer, "\n")); + try!(spaces(self.writer, *curr_indent)); + } + try!(write!(self.writer, "}}")); + } + Ok(()) + } + + fn emit_struct_field(&mut self, name: &str, idx: usize, f: F) -> EncodeResult<()> where + F: FnOnce(&mut Encoder<'a>) -> EncodeResult<()>, + { + if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } + if idx != 0 { + try!(write!(self.writer, ",")); + } + if let EncodingFormat::Pretty{curr_indent, ..} = self.format { + try!(write!(self.writer, "\n")); + try!(spaces(self.writer, curr_indent)); + } + try!(escape_str(self.writer, name)); + if let EncodingFormat::Pretty{..} = self.format { + try!(write!(self.writer, ": ")); + } else { + try!(write!(self.writer, ":")); + } + f(self) + } + + fn emit_tuple(&mut self, len: usize, f: F) -> EncodeResult<()> where + F: FnOnce(&mut Encoder<'a>) -> EncodeResult<()>, + { + if self.is_emitting_map_key { return 
Err(EncoderError::BadHashmapKey); } + self.emit_seq(len, f) + } + fn emit_tuple_arg(&mut self, idx: usize, f: F) -> EncodeResult<()> where + F: FnOnce(&mut Encoder<'a>) -> EncodeResult<()>, + { + if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } + self.emit_seq_elt(idx, f) + } + + fn emit_tuple_struct(&mut self, _: &str, len: usize, f: F) -> EncodeResult<()> where + F: FnOnce(&mut Encoder<'a>) -> EncodeResult<()>, + { + if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } + self.emit_seq(len, f) + } + fn emit_tuple_struct_arg(&mut self, idx: usize, f: F) -> EncodeResult<()> where + F: FnOnce(&mut Encoder<'a>) -> EncodeResult<()>, + { + if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } + self.emit_seq_elt(idx, f) + } + + fn emit_option(&mut self, f: F) -> EncodeResult<()> where + F: FnOnce(&mut Encoder<'a>) -> EncodeResult<()>, + { + if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } + f(self) + } + fn emit_option_none(&mut self) -> EncodeResult<()> { + if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } + self.emit_nil() + } + fn emit_option_some(&mut self, f: F) -> EncodeResult<()> where + F: FnOnce(&mut Encoder<'a>) -> EncodeResult<()>, + { + if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } + f(self) + } + + fn emit_seq(&mut self, len: usize, f: F) -> EncodeResult<()> where + F: FnOnce(&mut Encoder<'a>) -> EncodeResult<()>, + { + if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } + if len == 0 { + try!(write!(self.writer, "[]")); + } else { + try!(write!(self.writer, "[")); + if let EncodingFormat::Pretty{ref mut curr_indent, indent} = self.format { + *curr_indent += indent; + } + try!(f(self)); + if let EncodingFormat::Pretty{ref mut curr_indent, indent} = self.format { + *curr_indent -= indent; + try!(write!(self.writer, "\n")); + try!(spaces(self.writer, *curr_indent)); + } + try!(write!(self.writer, "]")); + } + Ok(()) + } + + fn emit_seq_elt(&mut self, idx: usize, f: F) -> EncodeResult<()> where + F: FnOnce(&mut Encoder<'a>) -> EncodeResult<()>, + { + if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } + if idx != 0 { + try!(write!(self.writer, ",")); + } + if let EncodingFormat::Pretty{ref mut curr_indent, ..} = self.format { + try!(write!(self.writer, "\n")); + try!(spaces(self.writer, *curr_indent)); + } + f(self) + } + + fn emit_map(&mut self, len: usize, f: F) -> EncodeResult<()> where + F: FnOnce(&mut Encoder<'a>) -> EncodeResult<()>, + { + if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } + if len == 0 { + try!(write!(self.writer, "{{}}")); + } else { + try!(write!(self.writer, "{{")); + if let EncodingFormat::Pretty{ref mut curr_indent, indent} = self.format { + *curr_indent += indent; + } + try!(f(self)); + if let EncodingFormat::Pretty{ref mut curr_indent, indent} = self.format { + *curr_indent -= indent; + try!(write!(self.writer, "\n")); + try!(spaces(self.writer, *curr_indent)); + } + try!(write!(self.writer, "}}")); + } + Ok(()) + } + + fn emit_map_elt_key(&mut self, idx: usize, f: F) -> EncodeResult<()> where + F: FnOnce(&mut Encoder<'a>) -> EncodeResult<()>, + { + if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } + if idx != 0 { + try!(write!(self.writer, ",")); + } + if let EncodingFormat::Pretty{curr_indent, ..} = self.format { + try!(write!(self.writer, "\n")); + try!(spaces(self.writer, curr_indent)); + } + self.is_emitting_map_key = true; + 
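+        // While this flag is set, the scalar emitters go through
+        // `emit_enquoted_if_mapkey!` and wrap their output in quotes, so a
+        // non-string key (e.g. a number) still becomes a legal JSON object
+        // key; emitters that cannot produce a key return `BadHashmapKey`.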
try!(f(self)); + self.is_emitting_map_key = false; + Ok(()) + } + + fn emit_map_elt_val(&mut self, _idx: usize, f: F) -> EncodeResult<()> where + F: FnOnce(&mut Encoder<'a>) -> EncodeResult<()>, + { + if self.is_emitting_map_key { return Err(EncoderError::BadHashmapKey); } + if let EncodingFormat::Pretty{..} = self.format { + try!(write!(self.writer, ": ")); + } else { + try!(write!(self.writer, ":")); + } + f(self) + } +} + +impl Encodable for Json { + fn encode(&self, e: &mut S) -> Result<(), S::Error> { + match *self { + Json::I64(v) => v.encode(e), + Json::U64(v) => v.encode(e), + Json::F64(v) => v.encode(e), + Json::String(ref v) => v.encode(e), + Json::Boolean(v) => v.encode(e), + Json::Array(ref v) => v.encode(e), + Json::Object(ref v) => v.encode(e), + Json::Null => e.emit_nil(), + } + } +} + +/// Create an `AsJson` wrapper which can be used to print a value as JSON +/// on-the-fly via `write!` +pub fn as_json(t: &T) -> AsJson { + AsJson { inner: t } +} + +/// Create an `AsPrettyJson` wrapper which can be used to print a value as JSON +/// on-the-fly via `write!` +pub fn as_pretty_json(t: &T) -> AsPrettyJson { + AsPrettyJson { inner: t, indent: None } +} + +impl Json { + /// Decodes a json value from an `&mut io::Read` + pub fn from_reader(rdr: &mut io::Read) -> Result { + let contents = { + let mut c = Vec::new(); + try!(rdr.read_to_end(&mut c)); + c + }; + let s = match str::from_utf8(&contents).ok() { + Some(s) => s, + _ => return Err(SyntaxError(NotUtf8, 0, 0)) + }; + let mut builder = Builder::new(s.chars()); + builder.build() + } + + /// Decodes a json value from a string + pub fn from_str(s: &str) -> Result { + let mut builder = Builder::new(s.chars()); + builder.build() + } + + /// Borrow this json object as a pretty object to generate a pretty + /// representation for it via `Display`. + pub fn pretty(&self) -> PrettyJson { + PrettyJson { inner: self } + } + + /// If the Json value is an Object, returns the value associated with the provided key. + /// Otherwise, returns None. + pub fn find<'a>(&'a self, key: &str) -> Option<&'a Json>{ + match self { + &Json::Object(ref map) => map.get(key), + _ => None + } + } + + /// Attempts to get a nested Json Object for each key in `keys`. + /// If any key is found not to exist, find_path will return None. + /// Otherwise, it will return the Json value associated with the final key. + pub fn find_path<'a>(&'a self, keys: &[&str]) -> Option<&'a Json>{ + let mut target = self; + for key in keys.iter() { + match target.find(*key) { + Some(t) => { target = t; }, + None => return None + } + } + Some(target) + } + + /// If the Json value is an Object, performs a depth-first search until + /// a value associated with the provided key is found. If no value is found + /// or the Json value is not an Object, returns None. + pub fn search<'a>(&'a self, key: &str) -> Option<&'a Json> { + match self { + &Json::Object(ref map) => { + match map.get(key) { + Some(json_value) => Some(json_value), + None => { + for (_, v) in map.iter() { + match v.search(key) { + x if x.is_some() => return x, + _ => () + } + } + None + } + } + }, + _ => None + } + } + + /// Returns true if the Json value is an Object. Returns false otherwise. + pub fn is_object<'a>(&'a self) -> bool { + self.as_object().is_some() + } + + /// If the Json value is an Object, returns the associated BTreeMap. + /// Returns None otherwise. 
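+    ///
+    /// A small sketch (illustrative; assumes the crate is importable as
+    /// `rustc_serialize`):
+    ///
+    /// ```rust
+    /// extern crate rustc_serialize;
+    /// use rustc_serialize::json::Json;
+    ///
+    /// fn main() {
+    ///     let data = Json::from_str("{\"answer\": 42}").unwrap();
+    ///     let obj = data.as_object().unwrap();
+    ///     assert_eq!(obj.get("answer").and_then(|v| v.as_u64()), Some(42));
+    /// }
+    /// ```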
+ pub fn as_object<'a>(&'a self) -> Option<&'a Object> { + match self { + &Json::Object(ref map) => Some(map), + _ => None + } + } + + /// If the Json value is an Object, returns the associated mutable BTreeMap. + /// Returns None otherwise. + pub fn as_object_mut<'a>(&'a mut self) -> Option<&'a mut Object> { + match self { + &mut Json::Object(ref mut map) => Some(map), + _ => None + } + } + + /// Returns true if the Json value is an Array. Returns false otherwise. + pub fn is_array<'a>(&'a self) -> bool { + self.as_array().is_some() + } + + /// If the Json value is an Array, returns the associated vector. + /// Returns None otherwise. + pub fn as_array<'a>(&'a self) -> Option<&'a Array> { + match self { + &Json::Array(ref array) => Some(&*array), + _ => None + } + } + + /// If the Json value is an Array, returns the associated mutable vector. + /// Returns None otherwise. + pub fn as_array_mut<'a>(&'a mut self) -> Option<&'a mut Array> { + match self { + &mut Json::Array(ref mut list) => Some(list), + _ => None + } + } + + /// Returns true if the Json value is a String. Returns false otherwise. + pub fn is_string<'a>(&'a self) -> bool { + self.as_string().is_some() + } + + /// If the Json value is a String, returns the associated str. + /// Returns None otherwise. + pub fn as_string<'a>(&'a self) -> Option<&'a str> { + match *self { + Json::String(ref s) => Some(&s), + _ => None + } + } + + /// Returns true if the Json value is a Number. Returns false otherwise. + pub fn is_number(&self) -> bool { + match *self { + Json::I64(_) | Json::U64(_) | Json::F64(_) => true, + _ => false, + } + } + + /// Returns true if the Json value is a i64. Returns false otherwise. + pub fn is_i64(&self) -> bool { + match *self { + Json::I64(_) => true, + _ => false, + } + } + + /// Returns true if the Json value is a u64. Returns false otherwise. + pub fn is_u64(&self) -> bool { + match *self { + Json::U64(_) => true, + _ => false, + } + } + + /// Returns true if the Json value is a f64. Returns false otherwise. + pub fn is_f64(&self) -> bool { + match *self { + Json::F64(_) => true, + _ => false, + } + } + + /// If the Json value is a number, return or cast it to a i64. + /// Returns None otherwise. + pub fn as_i64(&self) -> Option { + match *self { + Json::I64(n) => Some(n), + Json::U64(n) if n >= i64::MAX as u64 => None, + Json::U64(n) => Some(n as i64), + _ => None + } + } + + /// If the Json value is a number, return or cast it to a u64. + /// Returns None otherwise. + pub fn as_u64(&self) -> Option { + match *self { + Json::I64(n) if n >= 0 => Some(n as u64), + Json::U64(n) => Some(n), + _ => None + } + } + + /// If the Json value is a number, return or cast it to a f64. + /// Returns None otherwise. + pub fn as_f64(&self) -> Option { + match *self { + Json::I64(n) => Some(n as f64), + Json::U64(n) => Some(n as f64), + Json::F64(n) => Some(n), + _ => None + } + } + + /// Returns true if the Json value is a Boolean. Returns false otherwise. + pub fn is_boolean(&self) -> bool { + self.as_boolean().is_some() + } + + /// If the Json value is a Boolean, returns the associated bool. + /// Returns None otherwise. + pub fn as_boolean(&self) -> Option { + match self { + &Json::Boolean(b) => Some(b), + _ => None + } + } + + /// Returns true if the Json value is a Null. Returns false otherwise. + pub fn is_null(&self) -> bool { + self.as_null().is_some() + } + + /// If the Json value is a Null, returns (). + /// Returns None otherwise. 
+ pub fn as_null(&self) -> Option<()> { + match self { + &Json::Null => Some(()), + _ => None + } + } +} + +impl<'a> Index<&'a str> for Json { + type Output = Json; + + fn index(&self, idx: &str) -> &Json { + self.find(idx).unwrap() + } +} + +impl Index for Json { + type Output = Json; + + fn index<'a>(&'a self, idx: usize) -> &'a Json { + match self { + &Json::Array(ref v) => &v[idx], + _ => panic!("can only index Json with usize if it is an array") + } + } +} + +/// The output of the streaming parser. +#[derive(PartialEq, Debug)] +pub enum JsonEvent { + ObjectStart, + ObjectEnd, + ArrayStart, + ArrayEnd, + BooleanValue(bool), + I64Value(i64), + U64Value(u64), + F64Value(f64), + StringValue(string::String), + NullValue, + Error(ParserError), +} + +#[derive(PartialEq, Debug)] +enum ParserState { + // Parse a value in an array, true means first element. + ParseArray(bool), + // Parse ',' or ']' after an element in an array. + ParseArrayComma, + // Parse a key:value in an object, true means first element. + ParseObject(bool), + // Parse ',' or ']' after an element in an object. + ParseObjectComma, + // Initial state. + ParseStart, + // Expecting the stream to end. + ParseBeforeFinish, + // Parsing can't continue. + ParseFinished, +} + +/// A Stack represents the current position of the parser in the logical +/// structure of the JSON stream. +/// For example foo.bar[3].x +pub struct Stack { + stack: Vec, + str_buffer: Vec, +} + +/// StackElements compose a Stack. +/// For example, Key("foo"), Key("bar"), Index(3) and Key("x") are the +/// StackElements compositing the stack that represents foo.bar[3].x +#[derive(PartialEq, Clone, Debug)] +pub enum StackElement<'l> { + Index(u32), + Key(&'l str), +} + +// Internally, Key elements are stored as indices in a buffer to avoid +// allocating a string for every member of an object. +#[derive(PartialEq, Clone, Debug)] +enum InternalStackElement { + InternalIndex(u32), + InternalKey(u16, u16), // start, size +} + +impl Stack { + pub fn new() -> Stack { + Stack { stack: Vec::new(), str_buffer: Vec::new() } + } + + /// Returns The number of elements in the Stack. + pub fn len(&self) -> usize { self.stack.len() } + + /// Returns true if the stack is empty. + pub fn is_empty(&self) -> bool { self.stack.is_empty() } + + /// Provides access to the StackElement at a given index. + /// lower indices are at the bottom of the stack while higher indices are + /// at the top. + pub fn get<'l>(&'l self, idx: usize) -> StackElement<'l> { + match self.stack[idx] { + InternalIndex(i) => StackElement::Index(i), + InternalKey(start, size) => { + StackElement::Key(str::from_utf8( + &self.str_buffer[start as usize .. start as usize + size as usize]).unwrap()) + } + } + } + + /// Compares this stack with an array of StackElements. + pub fn is_equal_to(&self, rhs: &[StackElement]) -> bool { + if self.stack.len() != rhs.len() { return false; } + for i in 0..rhs.len() { + if self.get(i) != rhs[i] { return false; } + } + return true; + } + + /// Returns true if the bottom-most elements of this stack are the same as + /// the ones passed as parameter. + pub fn starts_with(&self, rhs: &[StackElement]) -> bool { + if self.stack.len() < rhs.len() { return false; } + for i in 0..rhs.len() { + if self.get(i) != rhs[i] { return false; } + } + return true; + } + + /// Returns true if the top-most elements of this stack are the same as + /// the ones passed as parameter. 
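+    ///
+    /// For example, while the parser is positioned at `foo.bar[3]`, the
+    /// stack ends with `[Key("bar"), Index(3)]` (and also with just
+    /// `[Index(3)]`).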
+ pub fn ends_with(&self, rhs: &[StackElement]) -> bool { + if self.stack.len() < rhs.len() { return false; } + let offset = self.stack.len() - rhs.len(); + for i in 0..rhs.len() { + if self.get(i + offset) != rhs[i] { return false; } + } + return true; + } + + /// Returns the top-most element (if any). + pub fn top<'l>(&'l self) -> Option> { + return match self.stack.last() { + None => None, + Some(&InternalIndex(i)) => Some(StackElement::Index(i)), + Some(&InternalKey(start, size)) => { + Some(StackElement::Key(str::from_utf8( + &self.str_buffer[start as usize .. (start+size) as usize] + ).unwrap())) + } + } + } + + // Used by Parser to insert Key elements at the top of the stack. + fn push_key(&mut self, key: string::String) { + self.stack.push(InternalKey(self.str_buffer.len() as u16, key.len() as u16)); + for c in key.as_bytes().iter() { + self.str_buffer.push(*c); + } + } + + // Used by Parser to insert Index elements at the top of the stack. + fn push_index(&mut self, index: u32) { + self.stack.push(InternalIndex(index)); + } + + // Used by Parser to remove the top-most element of the stack. + fn pop(&mut self) { + assert!(!self.is_empty()); + match *self.stack.last().unwrap() { + InternalKey(_, sz) => { + let new_size = self.str_buffer.len() - sz as usize; + self.str_buffer.truncate(new_size); + } + InternalIndex(_) => {} + } + self.stack.pop(); + } + + // Used by Parser to test whether the top-most element is an index. + fn last_is_index(&self) -> bool { + if self.is_empty() { return false; } + return match *self.stack.last().unwrap() { + InternalIndex(_) => true, + _ => false, + } + } + + // Used by Parser to increment the index of the top-most element. + fn bump_index(&mut self) { + let len = self.stack.len(); + let idx = match *self.stack.last().unwrap() { + InternalIndex(i) => { i + 1 } + _ => { panic!(); } + }; + self.stack[len - 1] = InternalIndex(idx); + } +} + +/// A streaming JSON parser implemented as an iterator of JsonEvent, consuming +/// an iterator of char. +pub struct Parser { + rdr: T, + ch: Option, + line: usize, + col: usize, + // We maintain a stack representing where we are in the logical structure + // of the JSON stream. + stack: Stack, + // A state machine is kept to make it possible to interrupt and resume parsing. + state: ParserState, +} + +impl> Iterator for Parser { + type Item = JsonEvent; + + fn next(&mut self) -> Option { + if self.state == ParseFinished { + return None; + } + + if self.state == ParseBeforeFinish { + self.parse_whitespace(); + // Make sure there is no trailing characters. + if self.eof() { + self.state = ParseFinished; + return None; + } else { + return Some(self.error_event(TrailingCharacters)); + } + } + + return Some(self.parse()); + } +} + +impl> Parser { + /// Creates the JSON parser. + pub fn new(rdr: T) -> Parser { + let mut p = Parser { + rdr: rdr, + ch: Some('\x00'), + line: 1, + col: 0, + stack: Stack::new(), + state: ParseStart, + }; + p.bump(); + return p; + } + + /// Provides access to the current position in the logical structure of the + /// JSON stream. 
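+    ///
+    /// A streaming-parser sketch (illustrative; assumes the crate is
+    /// importable as `rustc_serialize`): while an event is being handled,
+    /// `stack()` describes where in the document it came from.
+    ///
+    /// ```rust
+    /// extern crate rustc_serialize;
+    /// use rustc_serialize::json::{Parser, JsonEvent, StackElement};
+    ///
+    /// fn main() {
+    ///     let mut parser = Parser::new("{\"foo\": [42]}".chars());
+    ///     while let Some(event) = parser.next() {
+    ///         if let JsonEvent::U64Value(n) = event {
+    ///             assert_eq!(n, 42);
+    ///             assert!(parser.stack().is_equal_to(&[
+    ///                 StackElement::Key("foo"),
+    ///                 StackElement::Index(0),
+    ///             ]));
+    ///         }
+    ///     }
+    /// }
+    /// ```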
+ pub fn stack<'l>(&'l self) -> &'l Stack { + return &self.stack; + } + + fn eof(&self) -> bool { self.ch.is_none() } + fn ch_or_null(&self) -> char { self.ch.unwrap_or('\x00') } + fn bump(&mut self) { + self.ch = self.rdr.next(); + + if self.ch_is('\n') { + self.line += 1; + self.col = 1; + } else { + self.col += 1; + } + } + + fn next_char(&mut self) -> Option { + self.bump(); + self.ch + } + fn ch_is(&self, c: char) -> bool { + self.ch == Some(c) + } + + fn error(&self, reason: ErrorCode) -> Result { + Err(SyntaxError(reason, self.line, self.col)) + } + + fn parse_whitespace(&mut self) { + while self.ch_is(' ') || + self.ch_is('\n') || + self.ch_is('\t') || + self.ch_is('\r') { self.bump(); } + } + + fn parse_number(&mut self) -> JsonEvent { + let mut neg = false; + + if self.ch_is('-') { + self.bump(); + neg = true; + } + + let res = match self.parse_u64() { + Ok(res) => res, + Err(e) => { return Error(e); } + }; + + if self.ch_is('.') || self.ch_is('e') || self.ch_is('E') { + let mut res = res as f64; + + if self.ch_is('.') { + res = match self.parse_decimal(res) { + Ok(res) => res, + Err(e) => { return Error(e); } + }; + } + + if self.ch_is('e') || self.ch_is('E') { + res = match self.parse_exponent(res) { + Ok(res) => res, + Err(e) => { return Error(e); } + }; + } + + if neg { + res *= -1.0; + } + + F64Value(res) + } else { + if neg { + // Make sure we don't underflow. + if res > (i64::MAX as u64) + 1 { + Error(SyntaxError(InvalidNumber, self.line, self.col)) + } else { + I64Value((!res + 1) as i64) + } + } else { + U64Value(res) + } + } + } + + fn parse_u64(&mut self) -> Result { + let mut accum: u64 = 0; + + match self.ch_or_null() { + '0' => { + self.bump(); + + // A leading '0' must be the only digit before the decimal point. + match self.ch_or_null() { + '0' ... '9' => return self.error(InvalidNumber), + _ => () + } + }, + '1' ... '9' => { + while !self.eof() { + match self.ch_or_null() { + c @ '0' ... '9' => { + macro_rules! try_or_invalid { + ($e: expr) => { + match $e { + Some(v) => v, + None => return self.error(InvalidNumber) + } + } + } + accum = try_or_invalid!(accum.checked_mul(10)); + accum = try_or_invalid!(accum.checked_add((c as u64) - ('0' as u64))); + + self.bump(); + } + _ => break, + } + } + } + _ => return self.error(InvalidNumber), + } + + Ok(accum) + } + + fn parse_decimal(&mut self, mut res: f64) -> Result { + self.bump(); + + // Make sure a digit follows the decimal place. + match self.ch_or_null() { + '0' ... '9' => (), + _ => return self.error(InvalidNumber) + } + + let mut dec = 1.0; + while !self.eof() { + match self.ch_or_null() { + c @ '0' ... '9' => { + dec /= 10.0; + res += (((c as isize) - ('0' as isize)) as f64) * dec; + self.bump(); + } + _ => break, + } + } + + Ok(res) + } + + fn parse_exponent(&mut self, mut res: f64) -> Result { + self.bump(); + + let mut exp = 0; + let mut neg_exp = false; + + if self.ch_is('+') { + self.bump(); + } else if self.ch_is('-') { + self.bump(); + neg_exp = true; + } + + // Make sure a digit follows the exponent place. + match self.ch_or_null() { + '0' ... '9' => (), + _ => return self.error(InvalidNumber) + } + while !self.eof() { + match self.ch_or_null() { + c @ '0' ... 
'9' => { + exp *= 10; + exp += (c as usize) - ('0' as usize); + + self.bump(); + } + _ => break + } + } + + let exp = 10_f64.powi(exp as i32); + if neg_exp { + res /= exp; + } else { + res *= exp; + } + + Ok(res) + } + + fn decode_hex_escape(&mut self) -> Result { + let mut i = 0; + let mut n = 0; + while i < 4 { + self.bump(); + n = match self.ch_or_null() { + c @ '0' ... '9' => n * 16 + ((c as u16) - ('0' as u16)), + c @ 'a' ... 'f' => n * 16 + (10 + (c as u16) - ('a' as u16)), + c @ 'A' ... 'F' => n * 16 + (10 + (c as u16) - ('A' as u16)), + _ => return self.error(InvalidEscape) + }; + + i += 1; + } + + Ok(n) + } + + fn parse_str(&mut self) -> Result { + let mut escape = false; + let mut res = string::String::new(); + + loop { + self.bump(); + if self.eof() { + return self.error(EOFWhileParsingString); + } + + if escape { + match self.ch_or_null() { + '"' => res.push('"'), + '\\' => res.push('\\'), + '/' => res.push('/'), + 'b' => res.push('\x08'), + 'f' => res.push('\x0c'), + 'n' => res.push('\n'), + 'r' => res.push('\r'), + 't' => res.push('\t'), + 'u' => match try!(self.decode_hex_escape()) { + 0xDC00 ... 0xDFFF => { + return self.error(LoneLeadingSurrogateInHexEscape) + } + + // Non-BMP characters are encoded as a sequence of + // two hex escapes, representing UTF-16 surrogates. + n1 @ 0xD800 ... 0xDBFF => { + match (self.next_char(), self.next_char()) { + (Some('\\'), Some('u')) => (), + _ => return self.error(UnexpectedEndOfHexEscape), + } + + let n2 = try!(self.decode_hex_escape()); + if n2 < 0xDC00 || n2 > 0xDFFF { + return self.error(LoneLeadingSurrogateInHexEscape) + } + let c = (((n1 - 0xD800) as u32) << 10 | + (n2 - 0xDC00) as u32) + 0x1_0000; + res.push(char::from_u32(c).unwrap()); + } + + n => match char::from_u32(n as u32) { + Some(c) => res.push(c), + None => return self.error(InvalidUnicodeCodePoint), + }, + }, + _ => return self.error(InvalidEscape), + } + escape = false; + } else if self.ch_is('\\') { + escape = true; + } else { + match self.ch { + Some('"') => { + self.bump(); + return Ok(res); + }, + Some(c) if c.is_control() => + return self.error(ControlCharacterInString), + Some(c) => res.push(c), + None => unreachable!() + } + } + } + } + + // Invoked at each iteration, consumes the stream until it has enough + // information to return a JsonEvent. + // Manages an internal state so that parsing can be interrupted and resumed. + // Also keeps track of the position in the logical structure of the json + // stream int the form of a stack that can be queried by the user using the + // stack() method. + fn parse(&mut self) -> JsonEvent { + loop { + // The only paths where the loop can spin a new iteration + // are in the cases ParseArrayComma and ParseObjectComma if ',' + // is parsed. In these cases the state is set to (respectively) + // ParseArray(false) and ParseObject(false), which always return, + // so there is no risk of getting stuck in an infinite loop. + // All other paths return before the end of the loop's iteration. 
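+            // Note that the ParseObjectComma arm below also pops the key
+            // of the member that just finished from `self.stack`, so the
+            // position reported by `stack()` stays accurate between
+            // events.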
+ self.parse_whitespace(); + + match self.state { + ParseStart => { + return self.parse_start(); + } + ParseArray(first) => { + return self.parse_array(first); + } + ParseArrayComma => { + match self.parse_array_comma_or_end() { + Some(evt) => { return evt; } + None => {} + } + } + ParseObject(first) => { + return self.parse_object(first); + } + ParseObjectComma => { + self.stack.pop(); + if self.ch_is(',') { + self.state = ParseObject(false); + self.bump(); + } else { + return self.parse_object_end(); + } + } + _ => { + return self.error_event(InvalidSyntax); + } + } + } + } + + fn parse_start(&mut self) -> JsonEvent { + let val = self.parse_value(); + self.state = match val { + Error(_) => ParseFinished, + ArrayStart => ParseArray(true), + ObjectStart => ParseObject(true), + _ => ParseBeforeFinish, + }; + return val; + } + + fn parse_array(&mut self, first: bool) -> JsonEvent { + if self.ch_is(']') { + if !first { + self.error_event(InvalidSyntax) + } else { + self.state = if self.stack.is_empty() { + ParseBeforeFinish + } else if self.stack.last_is_index() { + ParseArrayComma + } else { + ParseObjectComma + }; + self.bump(); + ArrayEnd + } + } else { + if first { + self.stack.push_index(0); + } + let val = self.parse_value(); + self.state = match val { + Error(_) => ParseFinished, + ArrayStart => ParseArray(true), + ObjectStart => ParseObject(true), + _ => ParseArrayComma, + }; + val + } + } + + fn parse_array_comma_or_end(&mut self) -> Option { + if self.ch_is(',') { + self.stack.bump_index(); + self.state = ParseArray(false); + self.bump(); + None + } else if self.ch_is(']') { + self.stack.pop(); + self.state = if self.stack.is_empty() { + ParseBeforeFinish + } else if self.stack.last_is_index() { + ParseArrayComma + } else { + ParseObjectComma + }; + self.bump(); + Some(ArrayEnd) + } else if self.eof() { + Some(self.error_event(EOFWhileParsingArray)) + } else { + Some(self.error_event(InvalidSyntax)) + } + } + + fn parse_object(&mut self, first: bool) -> JsonEvent { + if self.ch_is('}') { + if !first { + if self.stack.is_empty() { + return self.error_event(TrailingComma); + } else { + self.stack.pop(); + } + } + self.state = if self.stack.is_empty() { + ParseBeforeFinish + } else if self.stack.last_is_index() { + ParseArrayComma + } else { + ParseObjectComma + }; + self.bump(); + return ObjectEnd; + } + if self.eof() { + return self.error_event(EOFWhileParsingObject); + } + if !self.ch_is('"') { + return self.error_event(KeyMustBeAString); + } + let s = match self.parse_str() { + Ok(s) => s, + Err(e) => { + self.state = ParseFinished; + return Error(e); + } + }; + self.parse_whitespace(); + if self.eof() { + return self.error_event(EOFWhileParsingObject); + } else if self.ch_or_null() != ':' { + return self.error_event(ExpectedColon); + } + self.stack.push_key(s); + self.bump(); + self.parse_whitespace(); + + let val = self.parse_value(); + + self.state = match val { + Error(_) => ParseFinished, + ArrayStart => ParseArray(true), + ObjectStart => ParseObject(true), + _ => ParseObjectComma, + }; + return val; + } + + fn parse_object_end(&mut self) -> JsonEvent { + if self.ch_is('}') { + self.state = if self.stack.is_empty() { + ParseBeforeFinish + } else if self.stack.last_is_index() { + ParseArrayComma + } else { + ParseObjectComma + }; + self.bump(); + ObjectEnd + } else if self.eof() { + self.error_event(EOFWhileParsingObject) + } else { + self.error_event(InvalidSyntax) + } + } + + fn parse_value(&mut self) -> JsonEvent { + if self.eof() { return 
self.error_event(EOFWhileParsingValue); } + match self.ch_or_null() { + 'n' => { self.parse_ident("ull", NullValue) } + 't' => { self.parse_ident("rue", BooleanValue(true)) } + 'f' => { self.parse_ident("alse", BooleanValue(false)) } + '0' ... '9' | '-' => self.parse_number(), + '"' => match self.parse_str() { + Ok(s) => StringValue(s), + Err(e) => Error(e), + }, + '[' => { + self.bump(); + ArrayStart + } + '{' => { + self.bump(); + ObjectStart + } + _ => { self.error_event(InvalidSyntax) } + } + } + + fn parse_ident(&mut self, ident: &str, value: JsonEvent) -> JsonEvent { + if ident.chars().all(|c| Some(c) == self.next_char()) { + self.bump(); + value + } else { + Error(SyntaxError(InvalidSyntax, self.line, self.col)) + } + } + + fn error_event(&mut self, reason: ErrorCode) -> JsonEvent { + self.state = ParseFinished; + Error(SyntaxError(reason, self.line, self.col)) + } +} + +/// A Builder consumes a json::Parser to create a generic Json structure. +pub struct Builder { + parser: Parser, + token: Option, +} + +impl> Builder { + /// Create a JSON Builder. + pub fn new(src: T) -> Builder { + Builder { parser: Parser::new(src), token: None, } + } + + // Decode a Json value from a Parser. + pub fn build(&mut self) -> Result { + self.bump(); + let result = self.build_value(); + self.bump(); + match self.token.take() { + None => {} + Some(Error(e)) => { return Err(e); } + ref tok => { panic!("unexpected token {:?}", tok); } + } + result + } + + fn bump(&mut self) { + self.token = self.parser.next(); + } + + fn build_value(&mut self) -> Result { + return match self.token.take() { + Some(NullValue) => Ok(Json::Null), + Some(I64Value(n)) => Ok(Json::I64(n)), + Some(U64Value(n)) => Ok(Json::U64(n)), + Some(F64Value(n)) => Ok(Json::F64(n)), + Some(BooleanValue(b)) => Ok(Json::Boolean(b)), + Some(StringValue(ref mut s)) => { + let mut temp = string::String::new(); + swap(s, &mut temp); + Ok(Json::String(temp)) + } + Some(Error(e)) => Err(e), + Some(ArrayStart) => self.build_array(), + Some(ObjectStart) => self.build_object(), + Some(ObjectEnd) => self.parser.error(InvalidSyntax), + Some(ArrayEnd) => self.parser.error(InvalidSyntax), + None => self.parser.error(EOFWhileParsingValue), + } + } + + fn build_array(&mut self) -> Result { + self.bump(); + let mut values = Vec::new(); + + loop { + if let Some(ArrayEnd) = self.token { + return Ok(Json::Array(values.into_iter().collect())); + } + match self.build_value() { + Ok(v) => values.push(v), + Err(e) => { return Err(e) } + } + self.bump(); + } + } + + fn build_object(&mut self) -> Result { + self.bump(); + + let mut values = BTreeMap::new(); + + loop { + match self.token.take() { + Some(ObjectEnd) => { return Ok(Json::Object(values)); } + Some(Error(e)) => { return Err(e); } + None => { break; } + token => { self.token = token; } + } + let key = match self.parser.stack().top() { + Some(StackElement::Key(k)) => { k.to_string() } + _ => { panic!("invalid state"); } + }; + match self.build_value() { + Ok(value) => { values.insert(key, value); } + Err(e) => { return Err(e); } + } + self.bump(); + } + return self.parser.error(EOFWhileParsingObject); + } +} + +/// A structure to decode JSON to values in rust. +pub struct Decoder { + stack: Vec, +} + +impl Decoder { + /// Creates a new decoder instance for decoding the specified JSON value. 
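As a rough illustration of how `Decoder` is meant to be used (not part of the diff; `Point` is a hypothetical type, and the crate name follows the `src/lib.rs` added below):

```rust
extern crate rustc_serialize;

use rustc_serialize::json::{Decoder, Json};
use rustc_serialize::Decodable;

// `Point` is a made-up example type; any `Decodable` type works the same way.
#[derive(RustcDecodable, Debug)]
struct Point {
    x: u32,
    y: u32,
}

fn main() {
    let json = Json::from_str(r#"{"x": 1, "y": 2}"#).unwrap();
    let mut decoder = Decoder::new(json);
    let p: Point = Decodable::decode(&mut decoder).unwrap();
    println!("{:?}", p);
}
```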
+ pub fn new(json: Json) -> Decoder { + Decoder { stack: vec![json] } + } +} + +impl Decoder { + fn pop(&mut self) -> DecodeResult { + match self.stack.pop() { + Some(s) => Ok(s), + None => Err(EOF), + } + } +} + +macro_rules! expect { + ($e:expr, Null) => ({ + match try!($e) { + Json::Null => Ok(()), + other => Err(ExpectedError("Null".to_string(), + format!("{}", other))) + } + }); + ($e:expr, $t:ident) => ({ + match try!($e) { + Json::$t(v) => Ok(v), + other => { + Err(ExpectedError(stringify!($t).to_string(), + format!("{}", other))) + } + } + }) +} + +macro_rules! read_primitive { + ($name:ident, $ty:ident) => { + #[allow(unused_comparisons)] + fn $name(&mut self) -> DecodeResult<$ty> { + match try!(self.pop()) { + Json::I64(i) => { + let other = i as $ty; + if i == other as i64 && (other > 0) == (i > 0) { + Ok(other) + } else { + Err(ExpectedError("Number".to_string(), i.to_string())) + } + } + Json::U64(u) => { + let other = u as $ty; + if u == other as u64 && other >= 0 { + Ok(other) + } else { + Err(ExpectedError("Number".to_string(), u.to_string())) + } + } + Json::F64(f) => { + Err(ExpectedError("Integer".to_string(), f.to_string())) + } + // re: #12967.. a type w/ numeric keys (ie HashMap etc) + // is going to have a string here, as per JSON spec. + Json::String(s) => match s.parse() { + Ok(f) => Ok(f), + Err(_) => Err(ExpectedError("Number".to_string(), s)), + }, + value => { + Err(ExpectedError("Number".to_string(), value.to_string())) + } + } + } + } +} + +impl ::Decoder for Decoder { + type Error = DecoderError; + + fn read_nil(&mut self) -> DecodeResult<()> { + expect!(self.pop(), Null) + } + + read_primitive! { read_usize, usize } + read_primitive! { read_u8, u8 } + read_primitive! { read_u16, u16 } + read_primitive! { read_u32, u32 } + read_primitive! { read_u64, u64 } + read_primitive! { read_isize, isize } + read_primitive! { read_i8, i8 } + read_primitive! { read_i16, i16 } + read_primitive! { read_i32, i32 } + read_primitive! { read_i64, i64 } + + fn read_f32(&mut self) -> DecodeResult { + self.read_f64().map(|x| x as f32) + } + + fn read_f64(&mut self) -> DecodeResult { + match try!(self.pop()) { + Json::I64(f) => Ok(f as f64), + Json::U64(f) => Ok(f as f64), + Json::F64(f) => Ok(f), + Json::String(s) => { + // re: #12967.. a type w/ numeric keys (ie HashMap etc) + // is going to have a string here, as per JSON spec. 
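The `Json::String` arms in the numeric readers above exist because JSON object keys are always strings, so maps with numeric keys round-trip through quoted keys (see the reference to #12967). A small sketch of that behaviour, assuming the `rustc-serialize` crate built from these sources:

```rust
extern crate rustc_serialize;

use std::collections::HashMap;
use rustc_serialize::json;

fn main() {
    let mut map: HashMap<u64, bool> = HashMap::new();
    map.insert(1, true);
    // The key is emitted as the string "1", since JSON keys must be strings.
    let encoded = json::encode(&map).unwrap(); // {"1":true}
    // On the way back, the numeric readers parse the string key as a number.
    let decoded: HashMap<u64, bool> = json::decode(&encoded).unwrap();
    assert_eq!(map, decoded);
}
```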
+ match s.parse() { + Ok(f) => Ok(f), + Err(_) => Err(ExpectedError("Number".to_string(), s)), + } + }, + Json::Null => Ok(f64::NAN), + value => Err(ExpectedError("Number".to_string(), format!("{}", value))) + } + } + + fn read_bool(&mut self) -> DecodeResult { + expect!(self.pop(), Boolean) + } + + fn read_char(&mut self) -> DecodeResult { + let s = try!(self.read_str()); + { + let mut it = s.chars(); + match (it.next(), it.next()) { + // exactly one character + (Some(c), None) => return Ok(c), + _ => () + } + } + Err(ExpectedError("single character string".to_string(), format!("{}", s))) + } + + fn read_str(&mut self) -> DecodeResult { + expect!(self.pop(), String) + } + + fn read_enum(&mut self, _name: &str, f: F) -> DecodeResult where + F: FnOnce(&mut Decoder) -> DecodeResult, + { + f(self) + } + + fn read_enum_variant(&mut self, names: &[&str], + mut f: F) -> DecodeResult + where F: FnMut(&mut Decoder, usize) -> DecodeResult, + { + let name = match try!(self.pop()) { + Json::String(s) => s, + Json::Object(mut o) => { + let n = match o.remove(&"variant".to_string()) { + Some(Json::String(s)) => s, + Some(val) => { + return Err(ExpectedError("String".to_string(), format!("{}", val))) + } + None => { + return Err(MissingFieldError("variant".to_string())) + } + }; + match o.remove(&"fields".to_string()) { + Some(Json::Array(l)) => { + for field in l.into_iter().rev() { + self.stack.push(field); + } + }, + Some(val) => { + return Err(ExpectedError("Array".to_string(), format!("{}", val))) + } + None => { + return Err(MissingFieldError("fields".to_string())) + } + } + n + } + json => { + return Err(ExpectedError("String or Object".to_string(), format!("{}", json))) + } + }; + let idx = match names.iter().position(|n| *n == name) { + Some(idx) => idx, + None => return Err(UnknownVariantError(name)) + }; + f(self, idx) + } + + fn read_enum_variant_arg(&mut self, _idx: usize, f: F) -> DecodeResult where + F: FnOnce(&mut Decoder) -> DecodeResult, + { + f(self) + } + + fn read_enum_struct_variant(&mut self, names: &[&str], f: F) -> DecodeResult where + F: FnMut(&mut Decoder, usize) -> DecodeResult, + { + self.read_enum_variant(names, f) + } + + + fn read_enum_struct_variant_field(&mut self, + _name: &str, + idx: usize, + f: F) + -> DecodeResult where + F: FnOnce(&mut Decoder) -> DecodeResult, + { + self.read_enum_variant_arg(idx, f) + } + + fn read_struct(&mut self, _name: &str, _len: usize, f: F) -> DecodeResult where + F: FnOnce(&mut Decoder) -> DecodeResult, + { + let value = try!(f(self)); + try!(self.pop()); + Ok(value) + } + + fn read_struct_field(&mut self, + name: &str, + _idx: usize, + f: F) + -> DecodeResult where + F: FnOnce(&mut Decoder) -> DecodeResult, + { + let mut obj = try!(expect!(self.pop(), Object)); + + let value = match obj.remove(&name.to_string()) { + None => { + // Add a Null and try to parse it as an Option<_> + // to get None as a default value. 
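`read_enum_variant` above accepts either a bare string (for unit variants) or the `{"variant": ..., "fields": [...]}` object form. A hedged sketch of that representation, using a made-up `Shape` enum and the `rustc-serialize` crate built from these sources:

```rust
extern crate rustc_serialize;

use rustc_serialize::json;

// `Shape` is a hypothetical enum used only to show the wire format.
#[derive(RustcEncodable, RustcDecodable, PartialEq, Debug)]
enum Shape {
    Dot,
    Circle(f64),
}

fn main() {
    // Unit variants are encoded as a plain string...
    assert_eq!(json::encode(&Shape::Dot).unwrap(), r#""Dot""#);
    // ...variants with data use the "variant"/"fields" object form.
    let encoded = json::encode(&Shape::Circle(1.5)).unwrap();
    assert_eq!(encoded, r#"{"variant":"Circle","fields":[1.5]}"#);
    let decoded: Shape = json::decode(&encoded).unwrap();
    assert_eq!(decoded, Shape::Circle(1.5));
}
```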
+ self.stack.push(Json::Null); + match f(self) { + Ok(x) => x, + Err(_) => return Err(MissingFieldError(name.to_string())), + } + }, + Some(json) => { + self.stack.push(json); + try!(f(self)) + } + }; + self.stack.push(Json::Object(obj)); + Ok(value) + } + + fn read_tuple(&mut self, tuple_len: usize, f: F) -> DecodeResult where + F: FnOnce(&mut Decoder) -> DecodeResult, + { + self.read_seq(move |d, len| { + if len == tuple_len { + f(d) + } else { + Err(ExpectedError(format!("Tuple{}", tuple_len), format!("Tuple{}", len))) + } + }) + } + + fn read_tuple_arg(&mut self, idx: usize, f: F) -> DecodeResult where + F: FnOnce(&mut Decoder) -> DecodeResult, + { + self.read_seq_elt(idx, f) + } + + fn read_tuple_struct(&mut self, + _name: &str, + len: usize, + f: F) + -> DecodeResult where + F: FnOnce(&mut Decoder) -> DecodeResult, + { + self.read_tuple(len, f) + } + + fn read_tuple_struct_arg(&mut self, + idx: usize, + f: F) + -> DecodeResult where + F: FnOnce(&mut Decoder) -> DecodeResult, + { + self.read_tuple_arg(idx, f) + } + + fn read_option(&mut self, mut f: F) -> DecodeResult where + F: FnMut(&mut Decoder, bool) -> DecodeResult, + { + match try!(self.pop()) { + Json::Null => f(self, false), + value => { self.stack.push(value); f(self, true) } + } + } + + fn read_seq(&mut self, f: F) -> DecodeResult where + F: FnOnce(&mut Decoder, usize) -> DecodeResult, + { + let array = try!(expect!(self.pop(), Array)); + let len = array.len(); + for v in array.into_iter().rev() { + self.stack.push(v); + } + f(self, len) + } + + fn read_seq_elt(&mut self, _idx: usize, f: F) -> DecodeResult where + F: FnOnce(&mut Decoder) -> DecodeResult, + { + f(self) + } + + fn read_map(&mut self, f: F) -> DecodeResult where + F: FnOnce(&mut Decoder, usize) -> DecodeResult, + { + let obj = try!(expect!(self.pop(), Object)); + let len = obj.len(); + for (key, value) in obj.into_iter() { + self.stack.push(value); + self.stack.push(Json::String(key)); + } + f(self, len) + } + + fn read_map_elt_key(&mut self, _idx: usize, f: F) -> DecodeResult where + F: FnOnce(&mut Decoder) -> DecodeResult, + { + f(self) + } + + fn read_map_elt_val(&mut self, _idx: usize, f: F) -> DecodeResult where + F: FnOnce(&mut Decoder) -> DecodeResult, + { + f(self) + } + + fn error(&mut self, err: &str) -> DecoderError { + ApplicationError(err.to_string()) + } +} + +/// A trait for converting values to JSON +pub trait ToJson { + /// Converts the value of `self` to an instance of JSON + fn to_json(&self) -> Json; +} + +macro_rules! to_json_impl_i64 { + ($($t:ty), +) => ( + $(impl ToJson for $t { + fn to_json(&self) -> Json { Json::I64(*self as i64) } + })+ + ) +} + +to_json_impl_i64! { isize, i8, i16, i32, i64 } + +macro_rules! to_json_impl_u64 { + ($($t:ty), +) => ( + $(impl ToJson for $t { + fn to_json(&self) -> Json { Json::U64(*self as u64) } + })+ + ) +} + +to_json_impl_u64! 
{ usize, u8, u16, u32, u64 } + +impl ToJson for Json { + fn to_json(&self) -> Json { self.clone() } +} + +impl ToJson for f32 { + fn to_json(&self) -> Json { (*self as f64).to_json() } +} + +impl ToJson for f64 { + fn to_json(&self) -> Json { + use std::num::FpCategory::{Nan, Infinite}; + + match self.classify() { + Nan | Infinite => Json::Null, + _ => Json::F64(*self) + } + } +} + +impl ToJson for () { + fn to_json(&self) -> Json { Json::Null } +} + +impl ToJson for bool { + fn to_json(&self) -> Json { Json::Boolean(*self) } +} + +impl ToJson for str { + fn to_json(&self) -> Json { Json::String(self.to_string()) } +} + +impl ToJson for string::String { + fn to_json(&self) -> Json { Json::String((*self).clone()) } +} + +macro_rules! tuple_impl { + // use variables to indicate the arity of the tuple + ($($tyvar:ident),* ) => { + // the trailing commas are for the 1 tuple + impl< + $( $tyvar : ToJson ),* + > ToJson for ( $( $tyvar ),* , ) { + + #[inline] + #[allow(non_snake_case)] + fn to_json(&self) -> Json { + match *self { + ($(ref $tyvar),*,) => Json::Array(vec![$($tyvar.to_json()),*]) + } + } + } + } +} + +tuple_impl!{A} +tuple_impl!{A, B} +tuple_impl!{A, B, C} +tuple_impl!{A, B, C, D} +tuple_impl!{A, B, C, D, E} +tuple_impl!{A, B, C, D, E, F} +tuple_impl!{A, B, C, D, E, F, G} +tuple_impl!{A, B, C, D, E, F, G, H} +tuple_impl!{A, B, C, D, E, F, G, H, I} +tuple_impl!{A, B, C, D, E, F, G, H, I, J} +tuple_impl!{A, B, C, D, E, F, G, H, I, J, K} +tuple_impl!{A, B, C, D, E, F, G, H, I, J, K, L} + +impl ToJson for [A] { + fn to_json(&self) -> Json { Json::Array(self.iter().map(|elt| elt.to_json()).collect()) } +} + +impl ToJson for Vec { + fn to_json(&self) -> Json { Json::Array(self.iter().map(|elt| elt.to_json()).collect()) } +} + +impl ToJson for BTreeMap { + fn to_json(&self) -> Json { + let mut d = BTreeMap::new(); + for (key, value) in self.iter() { + d.insert((*key).clone(), value.to_json()); + } + Json::Object(d) + } +} + +impl ToJson for HashMap { + fn to_json(&self) -> Json { + let mut d = BTreeMap::new(); + for (key, value) in self.iter() { + d.insert((*key).clone(), value.to_json()); + } + Json::Object(d) + } +} + +impl ToJson for Option { + fn to_json(&self) -> Json { + match *self { + None => Json::Null, + Some(ref value) => value.to_json() + } + } +} + +struct FormatShim<'a, 'b: 'a> { + inner: &'a mut fmt::Formatter<'b>, +} + +impl<'a, 'b> fmt::Write for FormatShim<'a, 'b> { + fn write_str(&mut self, s: &str) -> fmt::Result { + match self.inner.write_str(s) { + Ok(_) => Ok(()), + Err(_) => Err(fmt::Error) + } + } +} + +impl fmt::Display for Json { + /// Encodes a json value into a string + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let mut shim = FormatShim { inner: f }; + let mut encoder = Encoder::new(&mut shim); + match self.encode(&mut encoder) { + Ok(_) => Ok(()), + Err(_) => Err(fmt::Error) + } + } +} + +impl<'a> fmt::Display for PrettyJson<'a> { + /// Encodes a json value into a string + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let mut shim = FormatShim { inner: f }; + let mut encoder = Encoder::new_pretty(&mut shim); + match self.inner.encode(&mut encoder) { + Ok(_) => Ok(()), + Err(_) => Err(fmt::Error) + } + } +} + +impl<'a, T: Encodable> fmt::Display for AsJson<'a, T> { + /// Encodes a json value into a string + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let mut shim = FormatShim { inner: f }; + let mut encoder = Encoder::new(&mut shim); + match self.inner.encode(&mut encoder) { + Ok(_) => Ok(()), + Err(_) => Err(fmt::Error) 
+ } + } +} + +impl<'a, T> AsPrettyJson<'a, T> { + /// Set the indentation level for the emitted JSON + pub fn indent(mut self, indent: u32) -> AsPrettyJson<'a, T> { + self.indent = Some(indent); + self + } +} + +impl<'a, T: Encodable> fmt::Display for AsPrettyJson<'a, T> { + /// Encodes a json value into a string + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let mut shim = FormatShim { inner: f }; + let mut encoder = Encoder::new_pretty(&mut shim); + if let Some(n) = self.indent { + // unwrap cannot panic for pretty encoders + let _ = encoder.set_indent(n); + } + match self.inner.encode(&mut encoder) { + Ok(_) => Ok(()), + Err(_) => Err(fmt::Error) + } + } +} + +impl FromStr for Json { + type Err = ParserError; + fn from_str(s: &str) -> Result { + Json::from_str(s) + } +} + +#[cfg(test)] +mod tests { + use self::Animal::*; + use self::DecodeEnum::*; + use {Encodable, Decodable}; + use super::Json::*; + use super::ErrorCode::*; + use super::ParserError::*; + use super::DecoderError::*; + use super::JsonEvent::*; + use super::StackElement::*; + use super::{Json, DecodeResult, DecoderError, JsonEvent, Parser, + StackElement, Stack, Decoder, Encoder, EncoderError}; + use std::{i64, u64, f32, f64}; + use std::collections::BTreeMap; + use std::string; + + #[derive(RustcDecodable, Eq, PartialEq, Debug)] + struct OptionData { + opt: Option, + } + + #[test] + fn test_decode_option_none() { + let s ="{}"; + let obj: OptionData = super::decode(s).unwrap(); + assert_eq!(obj, OptionData { opt: None }); + } + + #[test] + fn test_decode_option_some() { + let s = "{ \"opt\": 10 }"; + let obj: OptionData = super::decode(s).unwrap(); + assert_eq!(obj, OptionData { opt: Some(10) }); + } + + #[test] + fn test_decode_option_malformed() { + check_err::("{ \"opt\": [] }", + ExpectedError("Number".to_string(), "[]".to_string())); + check_err::("{ \"opt\": false }", + ExpectedError("Number".to_string(), "false".to_string())); + } + + #[derive(PartialEq, RustcEncodable, RustcDecodable, Debug)] + enum Animal { + Dog, + Frog(string::String, isize) + } + + #[derive(PartialEq, RustcEncodable, RustcDecodable, Debug)] + struct Inner { + a: (), + b: usize, + c: Vec, + } + + #[derive(PartialEq, RustcEncodable, RustcDecodable, Debug)] + struct Outer { + inner: Vec, + } + + fn mk_object(items: &[(string::String, Json)]) -> Json { + let mut d = BTreeMap::new(); + + for item in items.iter() { + match *item { + (ref key, ref value) => { d.insert((*key).clone(), (*value).clone()); }, + } + }; + + Object(d) + } + + #[test] + fn test_from_str_trait() { + let s = "null"; + assert!(s.parse::().unwrap() == s.parse().unwrap()); + } + + #[test] + fn test_write_null() { + assert_eq!(Null.to_string(), "null"); + assert_eq!(Null.pretty().to_string(), "null"); + } + + #[test] + fn test_write_i64() { + assert_eq!(U64(0).to_string(), "0"); + assert_eq!(U64(0).pretty().to_string(), "0"); + + assert_eq!(U64(1234).to_string(), "1234"); + assert_eq!(U64(1234).pretty().to_string(), "1234"); + + assert_eq!(I64(-5678).to_string(), "-5678"); + assert_eq!(I64(-5678).pretty().to_string(), "-5678"); + + assert_eq!(U64(7650007200025252000).to_string(), "7650007200025252000"); + assert_eq!(U64(7650007200025252000).pretty().to_string(), "7650007200025252000"); + } + + #[test] + fn test_write_f64() { + assert_eq!(F64(3.0).to_string(), "3.0"); + assert_eq!(F64(3.0).pretty().to_string(), "3.0"); + + assert_eq!(F64(3.1).to_string(), "3.1"); + assert_eq!(F64(3.1).pretty().to_string(), "3.1"); + + assert_eq!(F64(-1.5).to_string(), "-1.5"); + 
assert_eq!(F64(-1.5).pretty().to_string(), "-1.5"); + + assert_eq!(F64(0.5).to_string(), "0.5"); + assert_eq!(F64(0.5).pretty().to_string(), "0.5"); + + assert_eq!(F64(f64::NAN).to_string(), "null"); + assert_eq!(F64(f64::NAN).pretty().to_string(), "null"); + + assert_eq!(F64(f64::INFINITY).to_string(), "null"); + assert_eq!(F64(f64::INFINITY).pretty().to_string(), "null"); + + assert_eq!(F64(f64::NEG_INFINITY).to_string(), "null"); + assert_eq!(F64(f64::NEG_INFINITY).pretty().to_string(), "null"); + } + + #[test] + fn test_write_str() { + assert_eq!(String("".to_string()).to_string(), "\"\""); + assert_eq!(String("".to_string()).pretty().to_string(), "\"\""); + + assert_eq!(String("homura".to_string()).to_string(), "\"homura\""); + assert_eq!(String("madoka".to_string()).pretty().to_string(), "\"madoka\""); + } + + #[test] + fn test_write_bool() { + assert_eq!(Boolean(true).to_string(), "true"); + assert_eq!(Boolean(true).pretty().to_string(), "true"); + + assert_eq!(Boolean(false).to_string(), "false"); + assert_eq!(Boolean(false).pretty().to_string(), "false"); + } + + #[test] + fn test_write_array() { + assert_eq!(Array(vec![]).to_string(), "[]"); + assert_eq!(Array(vec![]).pretty().to_string(), "[]"); + + assert_eq!(Array(vec![Boolean(true)]).to_string(), "[true]"); + assert_eq!( + Array(vec![Boolean(true)]).pretty().to_string(), + "\ + [\n \ + true\n\ + ]" + ); + + let long_test_array = Array(vec![ + Boolean(false), + Null, + Array(vec![String("foo\nbar".to_string()), F64(3.5)])]); + + assert_eq!(long_test_array.to_string(), + "[false,null,[\"foo\\nbar\",3.5]]"); + assert_eq!( + long_test_array.pretty().to_string(), + "\ + [\n \ + false,\n \ + null,\n \ + [\n \ + \"foo\\nbar\",\n \ + 3.5\n \ + ]\n\ + ]" + ); + } + + #[test] + fn test_write_object() { + assert_eq!(mk_object(&[]).to_string(), "{}"); + assert_eq!(mk_object(&[]).pretty().to_string(), "{}"); + + assert_eq!( + mk_object(&[ + ("a".to_string(), Boolean(true)) + ]).to_string(), + "{\"a\":true}" + ); + assert_eq!( + mk_object(&[("a".to_string(), Boolean(true))]).pretty().to_string(), + "\ + {\n \ + \"a\": true\n\ + }" + ); + + let complex_obj = mk_object(&[ + ("b".to_string(), Array(vec![ + mk_object(&[("c".to_string(), String("\x0c\r".to_string()))]), + mk_object(&[("d".to_string(), String("".to_string()))]) + ])) + ]); + + assert_eq!( + complex_obj.to_string(), + "{\ + \"b\":[\ + {\"c\":\"\\f\\r\"},\ + {\"d\":\"\"}\ + ]\ + }" + ); + assert_eq!( + complex_obj.pretty().to_string(), + "\ + {\n \ + \"b\": [\n \ + {\n \ + \"c\": \"\\f\\r\"\n \ + },\n \ + {\n \ + \"d\": \"\"\n \ + }\n \ + ]\n\ + }" + ); + + let a = mk_object(&[ + ("a".to_string(), Boolean(true)), + ("b".to_string(), Array(vec![ + mk_object(&[("c".to_string(), String("\x0c\r".to_string()))]), + mk_object(&[("d".to_string(), String("".to_string()))]) + ])) + ]); + + // We can't compare the strings directly because the object fields be + // printed in a different order. 
+ assert_eq!(a.clone(), a.to_string().parse().unwrap()); + assert_eq!(a.clone(), a.pretty().to_string().parse().unwrap()); + } + + #[test] + fn test_write_enum() { + let animal = Dog; + assert_eq!( + format!("{}", super::as_json(&animal)), + "\"Dog\"" + ); + assert_eq!( + format!("{}", super::as_pretty_json(&animal)), + "\"Dog\"" + ); + + let animal = Frog("Henry".to_string(), 349); + assert_eq!( + format!("{}", super::as_json(&animal)), + "{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}" + ); + assert_eq!( + format!("{}", super::as_pretty_json(&animal)), + "{\n \ + \"variant\": \"Frog\",\n \ + \"fields\": [\n \ + \"Henry\",\n \ + 349\n \ + ]\n\ + }" + ); + } + + macro_rules! check_encoder_for_simple { + ($value:expr, $expected:expr) => ({ + let s = format!("{}", super::as_json(&$value)); + assert_eq!(s, $expected); + + let s = format!("{}", super::as_pretty_json(&$value)); + assert_eq!(s, $expected); + }) + } + + #[test] + fn test_write_some() { + check_encoder_for_simple!(Some("jodhpurs".to_string()), "\"jodhpurs\""); + } + + #[test] + fn test_write_none() { + check_encoder_for_simple!(None::, "null"); + } + + #[test] + fn test_write_char() { + check_encoder_for_simple!('a', "\"a\""); + check_encoder_for_simple!('\t', "\"\\t\""); + check_encoder_for_simple!('\u{0000}', "\"\\u0000\""); + check_encoder_for_simple!('\u{001b}', "\"\\u001b\""); + check_encoder_for_simple!('\u{007f}', "\"\\u007f\""); + check_encoder_for_simple!('\u{00a0}', "\"\u{00a0}\""); + check_encoder_for_simple!('\u{abcd}', "\"\u{abcd}\""); + check_encoder_for_simple!('\u{10ffff}', "\"\u{10ffff}\""); + } + + #[test] + fn test_trailing_characters() { + assert_eq!(Json::from_str("nulla"), Err(SyntaxError(TrailingCharacters, 1, 5))); + assert_eq!(Json::from_str("truea"), Err(SyntaxError(TrailingCharacters, 1, 5))); + assert_eq!(Json::from_str("falsea"), Err(SyntaxError(TrailingCharacters, 1, 6))); + assert_eq!(Json::from_str("1a"), Err(SyntaxError(TrailingCharacters, 1, 2))); + assert_eq!(Json::from_str("[]a"), Err(SyntaxError(TrailingCharacters, 1, 3))); + assert_eq!(Json::from_str("{}a"), Err(SyntaxError(TrailingCharacters, 1, 3))); + } + + #[test] + fn test_read_identifiers() { + assert_eq!(Json::from_str("n"), Err(SyntaxError(InvalidSyntax, 1, 2))); + assert_eq!(Json::from_str("nul"), Err(SyntaxError(InvalidSyntax, 1, 4))); + assert_eq!(Json::from_str("t"), Err(SyntaxError(InvalidSyntax, 1, 2))); + assert_eq!(Json::from_str("truz"), Err(SyntaxError(InvalidSyntax, 1, 4))); + assert_eq!(Json::from_str("f"), Err(SyntaxError(InvalidSyntax, 1, 2))); + assert_eq!(Json::from_str("faz"), Err(SyntaxError(InvalidSyntax, 1, 3))); + + assert_eq!(Json::from_str("null"), Ok(Null)); + assert_eq!(Json::from_str("true"), Ok(Boolean(true))); + assert_eq!(Json::from_str("false"), Ok(Boolean(false))); + assert_eq!(Json::from_str(" null "), Ok(Null)); + assert_eq!(Json::from_str(" true "), Ok(Boolean(true))); + assert_eq!(Json::from_str(" false "), Ok(Boolean(false))); + } + + #[test] + fn test_decode_identifiers() { + let v: () = super::decode("null").unwrap(); + assert_eq!(v, ()); + + let v: bool = super::decode("true").unwrap(); + assert_eq!(v, true); + + let v: bool = super::decode("false").unwrap(); + assert_eq!(v, false); + } + + #[test] + fn test_read_number() { + assert_eq!(Json::from_str("+"), Err(SyntaxError(InvalidSyntax, 1, 1))); + assert_eq!(Json::from_str("."), Err(SyntaxError(InvalidSyntax, 1, 1))); + assert_eq!(Json::from_str("NaN"), Err(SyntaxError(InvalidSyntax, 1, 1))); + assert_eq!(Json::from_str("-"), 
Err(SyntaxError(InvalidNumber, 1, 2))); + assert_eq!(Json::from_str("00"), Err(SyntaxError(InvalidNumber, 1, 2))); + assert_eq!(Json::from_str("1."), Err(SyntaxError(InvalidNumber, 1, 3))); + assert_eq!(Json::from_str("1e"), Err(SyntaxError(InvalidNumber, 1, 3))); + assert_eq!(Json::from_str("1e+"), Err(SyntaxError(InvalidNumber, 1, 4))); + + assert_eq!(Json::from_str("18446744073709551616"), Err(SyntaxError(InvalidNumber, 1, 20))); + assert_eq!(Json::from_str("18446744073709551617"), Err(SyntaxError(InvalidNumber, 1, 20))); + assert_eq!(Json::from_str("-9223372036854775809"), Err(SyntaxError(InvalidNumber, 1, 21))); + + assert_eq!(Json::from_str("3"), Ok(U64(3))); + assert_eq!(Json::from_str("3.1"), Ok(F64(3.1))); + assert_eq!(Json::from_str("-1.2"), Ok(F64(-1.2))); + assert_eq!(Json::from_str("0.4"), Ok(F64(0.4))); + assert_eq!(Json::from_str("0.4e5"), Ok(F64(0.4e5))); + assert_eq!(Json::from_str("0.4e+15"), Ok(F64(0.4e15))); + assert_eq!(Json::from_str("0.4e-01"), Ok(F64(0.4e-01))); + assert_eq!(Json::from_str(" 3 "), Ok(U64(3))); + + assert_eq!(Json::from_str("-9223372036854775808"), Ok(I64(i64::MIN))); + assert_eq!(Json::from_str("9223372036854775807"), Ok(U64(i64::MAX as u64))); + assert_eq!(Json::from_str("18446744073709551615"), Ok(U64(u64::MAX))); + } + + #[test] + fn test_decode_numbers() { + let v: f64 = super::decode("3").unwrap(); + assert_eq!(v, 3.0); + + let v: f64 = super::decode("3.1").unwrap(); + assert_eq!(v, 3.1); + + let v: f64 = super::decode("-1.2").unwrap(); + assert_eq!(v, -1.2); + + let v: f64 = super::decode("0.4").unwrap(); + assert_eq!(v, 0.4); + + let v: f64 = super::decode("0.4e5").unwrap(); + assert_eq!(v, 0.4e5); + + let v: f64 = super::decode("0.4e15").unwrap(); + assert_eq!(v, 0.4e15); + + let v: f64 = super::decode("0.4e-01").unwrap(); + assert_eq!(v, 0.4e-01); + + let v: u64 = super::decode("0").unwrap(); + assert_eq!(v, 0); + + let v: u64 = super::decode("18446744073709551615").unwrap(); + assert_eq!(v, u64::MAX); + + let v: i64 = super::decode("-9223372036854775808").unwrap(); + assert_eq!(v, i64::MIN); + + let v: i64 = super::decode("9223372036854775807").unwrap(); + assert_eq!(v, i64::MAX); + + let res: DecodeResult = super::decode("765.25252"); + match res { + Ok(..) => panic!("expected an error"), + Err(ExpectedError(ref s, _)) => assert_eq!(s, "Integer"), + Err(..) 
=> panic!("expected an 'expected integer' error"), + } + } + + #[test] + fn test_read_str() { + assert_eq!(Json::from_str("\""), Err(SyntaxError(EOFWhileParsingString, 1, 2))); + assert_eq!(Json::from_str("\"lol"), Err(SyntaxError(EOFWhileParsingString, 1, 5))); + assert_eq!(Json::from_str("\"\n\""), Err(SyntaxError(ControlCharacterInString, 2, 1))); + + assert_eq!(Json::from_str("\"\""), Ok(String("".to_string()))); + assert_eq!(Json::from_str("\"foo\""), Ok(String("foo".to_string()))); + assert_eq!(Json::from_str("\"\\\"\""), Ok(String("\"".to_string()))); + assert_eq!(Json::from_str("\"\\b\""), Ok(String("\x08".to_string()))); + assert_eq!(Json::from_str("\"\\n\""), Ok(String("\n".to_string()))); + assert_eq!(Json::from_str("\"\\r\""), Ok(String("\r".to_string()))); + assert_eq!(Json::from_str("\"\\t\""), Ok(String("\t".to_string()))); + assert_eq!(Json::from_str(" \"foo\" "), Ok(String("foo".to_string()))); + assert_eq!(Json::from_str("\"\\u12ab\""), Ok(String("\u{12ab}".to_string()))); + assert_eq!(Json::from_str("\"\\uAB12\""), Ok(String("\u{AB12}".to_string()))); + } + + #[test] + fn test_decode_str() { + let s = [("\"\"", ""), + ("\"foo\"", "foo"), + ("\"\\\"\"", "\""), + ("\"\\b\"", "\x08"), + ("\"\\n\"", "\n"), + ("\"\\r\"", "\r"), + ("\"\\t\"", "\t"), + ("\"\\u12ab\"", "\u{12ab}"), + ("\"\\uAB12\"", "\u{AB12}")]; + + for &(i, o) in s.iter() { + let v: string::String = super::decode(i).unwrap(); + assert_eq!(v, o); + } + } + + #[test] + fn test_read_array() { + assert_eq!(Json::from_str("["), Err(SyntaxError(EOFWhileParsingValue, 1, 2))); + assert_eq!(Json::from_str("[1"), Err(SyntaxError(EOFWhileParsingArray, 1, 3))); + assert_eq!(Json::from_str("[1,"), Err(SyntaxError(EOFWhileParsingValue, 1, 4))); + assert_eq!(Json::from_str("[1,]"), Err(SyntaxError(InvalidSyntax, 1, 4))); + assert_eq!(Json::from_str("[6 7]"), Err(SyntaxError(InvalidSyntax, 1, 4))); + + assert_eq!(Json::from_str("[]"), Ok(Array(vec![]))); + assert_eq!(Json::from_str("[ ]"), Ok(Array(vec![]))); + assert_eq!(Json::from_str("[true]"), Ok(Array(vec![Boolean(true)]))); + assert_eq!(Json::from_str("[ false ]"), Ok(Array(vec![Boolean(false)]))); + assert_eq!(Json::from_str("[null]"), Ok(Array(vec![Null]))); + assert_eq!(Json::from_str("[3, 1]"), + Ok(Array(vec![U64(3), U64(1)]))); + assert_eq!(Json::from_str("\n[3, 2]\n"), + Ok(Array(vec![U64(3), U64(2)]))); + assert_eq!(Json::from_str("[2, [4, 1]]"), + Ok(Array(vec![U64(2), Array(vec![U64(4), U64(1)])]))); + } + + #[test] + fn test_decode_array() { + let v: Vec<()> = super::decode("[]").unwrap(); + assert_eq!(v, vec![]); + + let v: Vec<()> = super::decode("[null]").unwrap(); + assert_eq!(v, vec![()]); + + let v: Vec = super::decode("[true]").unwrap(); + assert_eq!(v, vec![true]); + + let v: Vec = super::decode("[3, 1]").unwrap(); + assert_eq!(v, vec![3, 1]); + + let v: Vec> = super::decode("[[3], [1, 2]]").unwrap(); + assert_eq!(v, vec![vec![3], vec![1, 2]]); + } + + #[test] + fn test_decode_tuple() { + let t: (usize, usize, usize) = super::decode("[1, 2, 3]").unwrap(); + assert_eq!(t, (1, 2, 3)); + + let t: (usize, string::String) = super::decode("[1, \"two\"]").unwrap(); + assert_eq!(t, (1, "two".to_string())); + } + + #[test] + fn test_decode_tuple_malformed_types() { + assert!(super::decode::<(usize, string::String)>("[1, 2]").is_err()); + } + + #[test] + fn test_decode_tuple_malformed_length() { + assert!(super::decode::<(usize, usize)>("[1, 2, 3]").is_err()); + } + + #[test] + fn test_read_object() { + assert_eq!(Json::from_str("{"), 
Err(SyntaxError(EOFWhileParsingObject, 1, 2))); + assert_eq!(Json::from_str("{ "), Err(SyntaxError(EOFWhileParsingObject, 1, 3))); + assert_eq!(Json::from_str("{1"), Err(SyntaxError(KeyMustBeAString, 1, 2))); + assert_eq!(Json::from_str("{ \"a\""), Err(SyntaxError(EOFWhileParsingObject, 1, 6))); + assert_eq!(Json::from_str("{\"a\""), Err(SyntaxError(EOFWhileParsingObject, 1, 5))); + assert_eq!(Json::from_str("{\"a\" "), Err(SyntaxError(EOFWhileParsingObject, 1, 6))); + + assert_eq!(Json::from_str("{\"a\" 1"), Err(SyntaxError(ExpectedColon, 1, 6))); + assert_eq!(Json::from_str("{\"a\":"), Err(SyntaxError(EOFWhileParsingValue, 1, 6))); + assert_eq!(Json::from_str("{\"a\":1"), Err(SyntaxError(EOFWhileParsingObject, 1, 7))); + assert_eq!(Json::from_str("{\"a\":1 1"), Err(SyntaxError(InvalidSyntax, 1, 8))); + assert_eq!(Json::from_str("{\"a\":1,"), Err(SyntaxError(EOFWhileParsingObject, 1, 8))); + + assert_eq!(Json::from_str("{}").unwrap(), mk_object(&[])); + assert_eq!(Json::from_str("{\"a\": 3}").unwrap(), + mk_object(&[("a".to_string(), U64(3))])); + + assert_eq!(Json::from_str( + "{ \"a\": null, \"b\" : true }").unwrap(), + mk_object(&[ + ("a".to_string(), Null), + ("b".to_string(), Boolean(true))])); + assert_eq!(Json::from_str("\n{ \"a\": null, \"b\" : true }\n").unwrap(), + mk_object(&[ + ("a".to_string(), Null), + ("b".to_string(), Boolean(true))])); + assert_eq!(Json::from_str( + "{\"a\" : 1.0 ,\"b\": [ true ]}").unwrap(), + mk_object(&[ + ("a".to_string(), F64(1.0)), + ("b".to_string(), Array(vec![Boolean(true)])) + ])); + assert_eq!(Json::from_str( + "{\ + \"a\": 1.0, \ + \"b\": [\ + true,\ + \"foo\\nbar\", \ + { \"c\": {\"d\": null} } \ + ]\ + }").unwrap(), + mk_object(&[ + ("a".to_string(), F64(1.0)), + ("b".to_string(), Array(vec![ + Boolean(true), + String("foo\nbar".to_string()), + mk_object(&[ + ("c".to_string(), mk_object(&[("d".to_string(), Null)])) + ]) + ])) + ])); + } + + #[test] + fn test_decode_struct() { + let s = "{ + \"inner\": [ + { \"a\": null, \"b\": 2, \"c\": [\"abc\", \"xyz\"] } + ] + }"; + + let v: Outer = super::decode(s).unwrap(); + assert_eq!( + v, + Outer { + inner: vec![ + Inner { a: (), b: 2, c: vec!["abc".to_string(), "xyz".to_string()] } + ] + } + ); + } + + #[derive(RustcDecodable)] + struct FloatStruct { + f: f64, + a: Vec + } + #[test] + fn test_decode_struct_with_nan() { + let s = "{\"f\":null,\"a\":[null,123]}"; + let obj: FloatStruct = super::decode(s).unwrap(); + assert!(obj.f.is_nan()); + assert!(obj.a[0].is_nan()); + assert_eq!(obj.a[1], 123f64); + } + + #[test] + fn test_decode_option() { + let value: Option = super::decode("null").unwrap(); + assert_eq!(value, None); + + let value: Option = super::decode("\"jodhpurs\"").unwrap(); + assert_eq!(value, Some("jodhpurs".to_string())); + } + + #[test] + fn test_decode_enum() { + let value: Animal = super::decode("\"Dog\"").unwrap(); + assert_eq!(value, Dog); + + let s = "{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}"; + let value: Animal = super::decode(s).unwrap(); + assert_eq!(value, Frog("Henry".to_string(), 349)); + } + + #[test] + fn test_decode_map() { + let s = "{\"a\": \"Dog\", \"b\": {\"variant\":\"Frog\",\ + \"fields\":[\"Henry\", 349]}}"; + let mut map: BTreeMap = super::decode(s).unwrap(); + + assert_eq!(map.remove(&"a".to_string()), Some(Dog)); + assert_eq!(map.remove(&"b".to_string()), Some(Frog("Henry".to_string(), 349))); + } + + #[test] + fn test_multiline_errors() { + assert_eq!(Json::from_str("{\n \"foo\":\n \"bar\""), + Err(SyntaxError(EOFWhileParsingObject, 3, 8))); + } + + 
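`test_multiline_errors` above relies on `SyntaxError` carrying 1-based line and column numbers. A small sketch of inspecting those positions, assuming the `rustc-serialize` crate built from these sources:

```rust
extern crate rustc_serialize;

use rustc_serialize::json::{Json, ParserError};

fn main() {
    // Same input as test_multiline_errors: EOF is hit on line 3, column 8.
    match Json::from_str("{\n \"foo\":\n \"bar\"") {
        Err(ParserError::SyntaxError(code, line, col)) => {
            println!("{:?} at line {}, column {}", code, line, col);
        }
        other => println!("unexpected result: {:?}", other),
    }
}
```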
#[derive(RustcDecodable)] + #[allow(dead_code)] + struct DecodeStruct { + x: f64, + y: bool, + z: string::String, + w: Vec + } + #[derive(RustcDecodable)] + enum DecodeEnum { + A(f64), + B(string::String) + } + fn check_err(to_parse: &'static str, expected: DecoderError) { + let res: DecodeResult = match Json::from_str(to_parse) { + Err(e) => Err(ParseError(e)), + Ok(json) => Decodable::decode(&mut Decoder::new(json)) + }; + match res { + Ok(_) => panic!("`{:?}` parsed & decoded ok, expecting error `{:?}`", + to_parse, expected), + Err(ParseError(e)) => panic!("`{}` is not valid json: {:?}", + to_parse, e), + Err(e) => { + assert_eq!(e, expected); + } + } + } + #[test] + fn test_decode_errors_struct() { + check_err::("[]", ExpectedError("Object".to_string(), "[]".to_string())); + check_err::("{\"x\": true, \"y\": true, \"z\": \"\", \"w\": []}", + ExpectedError("Number".to_string(), "true".to_string())); + check_err::("{\"x\": 1, \"y\": [], \"z\": \"\", \"w\": []}", + ExpectedError("Boolean".to_string(), "[]".to_string())); + check_err::("{\"x\": 1, \"y\": true, \"z\": {}, \"w\": []}", + ExpectedError("String".to_string(), "{}".to_string())); + check_err::("{\"x\": 1, \"y\": true, \"z\": \"\", \"w\": null}", + ExpectedError("Array".to_string(), "null".to_string())); + check_err::("{\"x\": 1, \"y\": true, \"z\": \"\"}", + MissingFieldError("w".to_string())); + } + #[test] + fn test_decode_errors_enum() { + check_err::("{}", + MissingFieldError("variant".to_string())); + check_err::("{\"variant\": 1}", + ExpectedError("String".to_string(), "1".to_string())); + check_err::("{\"variant\": \"A\"}", + MissingFieldError("fields".to_string())); + check_err::("{\"variant\": \"A\", \"fields\": null}", + ExpectedError("Array".to_string(), "null".to_string())); + check_err::("{\"variant\": \"C\", \"fields\": []}", + UnknownVariantError("C".to_string())); + } + + #[test] + fn test_find(){ + let json_value = Json::from_str("{\"dog\" : \"cat\"}").unwrap(); + let found_str = json_value.find("dog"); + assert!(found_str.unwrap().as_string().unwrap() == "cat"); + } + + #[test] + fn test_find_path(){ + let json_value = Json::from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap(); + let found_str = json_value.find_path(&["dog", "cat", "mouse"]); + assert!(found_str.unwrap().as_string().unwrap() == "cheese"); + } + + #[test] + fn test_search(){ + let json_value = Json::from_str("{\"dog\":{\"cat\": {\"mouse\" : \"cheese\"}}}").unwrap(); + let found_str = json_value.search("mouse").and_then(|j| j.as_string()); + assert!(found_str.unwrap() == "cheese"); + } + + #[test] + fn test_index(){ + let json_value = Json::from_str("{\"animals\":[\"dog\",\"cat\",\"mouse\"]}").unwrap(); + let ref array = json_value["animals"]; + assert_eq!(array[0].as_string().unwrap(), "dog"); + assert_eq!(array[1].as_string().unwrap(), "cat"); + assert_eq!(array[2].as_string().unwrap(), "mouse"); + } + + #[test] + fn test_is_object(){ + let json_value = Json::from_str("{}").unwrap(); + assert!(json_value.is_object()); + } + + #[test] + fn test_as_object(){ + let json_value = Json::from_str("{}").unwrap(); + let json_object = json_value.as_object(); + assert!(json_object.is_some()); + } + + #[test] + fn test_is_array(){ + let json_value = Json::from_str("[1, 2, 3]").unwrap(); + assert!(json_value.is_array()); + } + + #[test] + fn test_as_array(){ + let json_value = Json::from_str("[1, 2, 3]").unwrap(); + let json_array = json_value.as_array(); + let expected_length = 3; + assert!(json_array.is_some() && json_array.unwrap().len() 
== expected_length); + } + + #[test] + fn test_is_string(){ + let json_value = Json::from_str("\"dog\"").unwrap(); + assert!(json_value.is_string()); + } + + #[test] + fn test_as_string(){ + let json_value = Json::from_str("\"dog\"").unwrap(); + let json_str = json_value.as_string(); + let expected_str = "dog"; + assert_eq!(json_str, Some(expected_str)); + } + + #[test] + fn test_is_number(){ + let json_value = Json::from_str("12").unwrap(); + assert!(json_value.is_number()); + } + + #[test] + fn test_is_i64(){ + let json_value = Json::from_str("-12").unwrap(); + assert!(json_value.is_i64()); + + let json_value = Json::from_str("12").unwrap(); + assert!(!json_value.is_i64()); + + let json_value = Json::from_str("12.0").unwrap(); + assert!(!json_value.is_i64()); + } + + #[test] + fn test_is_u64(){ + let json_value = Json::from_str("12").unwrap(); + assert!(json_value.is_u64()); + + let json_value = Json::from_str("-12").unwrap(); + assert!(!json_value.is_u64()); + + let json_value = Json::from_str("12.0").unwrap(); + assert!(!json_value.is_u64()); + } + + #[test] + fn test_is_f64(){ + let json_value = Json::from_str("12").unwrap(); + assert!(!json_value.is_f64()); + + let json_value = Json::from_str("-12").unwrap(); + assert!(!json_value.is_f64()); + + let json_value = Json::from_str("12.0").unwrap(); + assert!(json_value.is_f64()); + + let json_value = Json::from_str("-12.0").unwrap(); + assert!(json_value.is_f64()); + } + + #[test] + fn test_as_i64(){ + let json_value = Json::from_str("-12").unwrap(); + let json_num = json_value.as_i64(); + assert_eq!(json_num, Some(-12)); + } + + #[test] + fn test_as_u64(){ + let json_value = Json::from_str("12").unwrap(); + let json_num = json_value.as_u64(); + assert_eq!(json_num, Some(12)); + } + + #[test] + fn test_as_f64(){ + let json_value = Json::from_str("12.0").unwrap(); + let json_num = json_value.as_f64(); + assert_eq!(json_num, Some(12f64)); + } + + #[test] + fn test_is_boolean(){ + let json_value = Json::from_str("false").unwrap(); + assert!(json_value.is_boolean()); + } + + #[test] + fn test_as_boolean(){ + let json_value = Json::from_str("false").unwrap(); + let json_bool = json_value.as_boolean(); + let expected_bool = false; + assert!(json_bool.is_some() && json_bool.unwrap() == expected_bool); + } + + #[test] + fn test_is_null(){ + let json_value = Json::from_str("null").unwrap(); + assert!(json_value.is_null()); + } + + #[test] + fn test_as_null(){ + let json_value = Json::from_str("null").unwrap(); + let json_null = json_value.as_null(); + let expected_null = (); + assert!(json_null.is_some() && json_null.unwrap() == expected_null); + } + + #[test] + fn test_encode_hashmap_with_numeric_key() { + use std::collections::HashMap; + let mut hm: HashMap = HashMap::new(); + hm.insert(1, true); + let json_str = super::as_pretty_json(&hm).to_string(); + match Json::from_str(&json_str) { + Err(_) => panic!("Unable to parse json_str: {}", json_str), + _ => {} // it parsed and we are good to go + } + } + + #[test] + fn test_prettyencode_hashmap_with_numeric_key() { + use std::collections::HashMap; + let mut hm: HashMap = HashMap::new(); + hm.insert(1, true); + let json_str = super::as_pretty_json(&hm).to_string(); + match Json::from_str(&json_str) { + Err(_) => panic!("Unable to parse json_str: {}", json_str), + _ => {} // it parsed and we are good to go + } + } + + #[test] + fn test_prettyencoder_indent_level_param() { + use std::collections::BTreeMap; + + let mut tree = BTreeMap::new(); + + tree.insert("hello".to_string(), String("guten 
tag".to_string())); + tree.insert("goodbye".to_string(), String("sayonara".to_string())); + + let json = Array( + // The following layout below should look a lot like + // the pretty-printed JSON (indent * x) + vec! + ( // 0x + String("greetings".to_string()), // 1x + Object(tree), // 1x + 2x + 2x + 1x + ) // 0x + // End JSON array (7 lines) + ); + + // Helper function for counting indents + fn indents(source: &str) -> usize { + let trimmed = source.trim_left_matches(' '); + source.len() - trimmed.len() + } + + // Test up to 4 spaces of indents (more?) + for i in 0..4 { + let printed = super::as_pretty_json(&json).indent(i as u32) + .to_string(); + + // Check for indents at each line + let lines: Vec<&str> = printed.lines().collect(); + assert_eq!(lines.len(), 7); // JSON should be 7 lines + + assert_eq!(indents(lines[0]), 0 * i); // [ + assert_eq!(indents(lines[1]), 1 * i); // "greetings", + assert_eq!(indents(lines[2]), 1 * i); // { + assert_eq!(indents(lines[3]), 2 * i); // "hello": "guten tag", + assert_eq!(indents(lines[4]), 2 * i); // "goodbye": "sayonara" + assert_eq!(indents(lines[5]), 1 * i); // }, + assert_eq!(indents(lines[6]), 0 * i); // ] + + // Finally, test that the pretty-printed JSON is valid + Json::from_str(&printed).ok() + .expect("Pretty-printed JSON is invalid!"); + } + } + + #[test] + fn test_hashmap_with_numeric_key_can_handle_double_quote_delimited_key() { + use std::collections::HashMap; + use Decodable; + let json_str = "{\"1\":true}"; + let json_obj = match Json::from_str(json_str) { + Err(_) => panic!("Unable to parse json_str: {}", json_str), + Ok(o) => o + }; + let mut decoder = Decoder::new(json_obj); + let _hm: HashMap = Decodable::decode(&mut decoder).unwrap(); + } + + #[test] + fn test_hashmap_with_enum_key() { + use std::collections::HashMap; + use json; + #[derive(RustcEncodable, Eq, Hash, PartialEq, RustcDecodable, Debug)] + enum Enum { + Foo, + #[allow(dead_code)] + Bar, + } + let mut map = HashMap::new(); + map.insert(Enum::Foo, 0); + let result = json::encode(&map).unwrap(); + assert_eq!(result, r#"{"Foo":0}"#); + let decoded: HashMap = json::decode(&result).unwrap(); + assert_eq!(map, decoded); + } + + #[test] + fn test_hashmap_with_numeric_key_will_error_with_string_keys() { + use std::collections::HashMap; + use Decodable; + let json_str = "{\"a\":true}"; + let json_obj = match Json::from_str(json_str) { + Err(_) => panic!("Unable to parse json_str: {}", json_str), + Ok(o) => o + }; + let mut decoder = Decoder::new(json_obj); + let result: Result, DecoderError> = Decodable::decode(&mut decoder); + assert_eq!(result, Err(ExpectedError("Number".to_string(), "a".to_string()))); + } + + fn assert_stream_equal(src: &str, + expected: Vec<(JsonEvent, Vec)>) { + let mut parser = Parser::new(src.chars()); + let mut i = 0; + loop { + let evt = match parser.next() { + Some(e) => e, + None => { break; } + }; + let (ref expected_evt, ref expected_stack) = expected[i]; + if !parser.stack().is_equal_to(&expected_stack) { + panic!("Parser stack is not equal to {:?}", expected_stack); + } + assert_eq!(&evt, expected_evt); + i+=1; + } + } + #[test] + #[cfg_attr(target_word_size = "32", ignore)] // FIXME(#14064) + fn test_streaming_parser() { + assert_stream_equal( + r#"{ "foo":"bar", "array" : [0, 1, 2, 3, 4, 5], "idents":[null,true,false]}"#, + vec![ + (ObjectStart, vec![]), + (StringValue("bar".to_string()), vec![Key("foo")]), + (ArrayStart, vec![Key("array")]), + (U64Value(0), vec![Key("array"), Index(0)]), + (U64Value(1), vec![Key("array"), Index(1)]), + 
(U64Value(2), vec![Key("array"), Index(2)]), + (U64Value(3), vec![Key("array"), Index(3)]), + (U64Value(4), vec![Key("array"), Index(4)]), + (U64Value(5), vec![Key("array"), Index(5)]), + (ArrayEnd, vec![Key("array")]), + (ArrayStart, vec![Key("idents")]), + (NullValue, vec![Key("idents"), Index(0)]), + (BooleanValue(true), vec![Key("idents"), Index(1)]), + (BooleanValue(false), vec![Key("idents"), Index(2)]), + (ArrayEnd, vec![Key("idents")]), + (ObjectEnd, vec![]), + ] + ); + } + fn last_event(src: &str) -> JsonEvent { + let mut parser = Parser::new(src.chars()); + let mut evt = NullValue; + loop { + evt = match parser.next() { + Some(e) => e, + None => return evt, + } + } + } + + #[test] + #[cfg_attr(target_word_size = "32", ignore)] // FIXME(#14064) + fn test_read_object_streaming() { + assert_eq!(last_event("{ "), Error(SyntaxError(EOFWhileParsingObject, 1, 3))); + assert_eq!(last_event("{1"), Error(SyntaxError(KeyMustBeAString, 1, 2))); + assert_eq!(last_event("{ \"a\""), Error(SyntaxError(EOFWhileParsingObject, 1, 6))); + assert_eq!(last_event("{\"a\""), Error(SyntaxError(EOFWhileParsingObject, 1, 5))); + assert_eq!(last_event("{\"a\" "), Error(SyntaxError(EOFWhileParsingObject, 1, 6))); + + assert_eq!(last_event("{\"a\" 1"), Error(SyntaxError(ExpectedColon, 1, 6))); + assert_eq!(last_event("{\"a\":"), Error(SyntaxError(EOFWhileParsingValue, 1, 6))); + assert_eq!(last_event("{\"a\":1"), Error(SyntaxError(EOFWhileParsingObject, 1, 7))); + assert_eq!(last_event("{\"a\":1 1"), Error(SyntaxError(InvalidSyntax, 1, 8))); + assert_eq!(last_event("{\"a\":1,"), Error(SyntaxError(EOFWhileParsingObject, 1, 8))); + assert_eq!(last_event("{\"a\":1,}"), Error(SyntaxError(TrailingComma, 1, 8))); + + assert_stream_equal( + "{}", + vec![(ObjectStart, vec![]), (ObjectEnd, vec![])] + ); + assert_stream_equal( + "{\"a\": 3}", + vec![ + (ObjectStart, vec![]), + (U64Value(3), vec![Key("a")]), + (ObjectEnd, vec![]), + ] + ); + assert_stream_equal( + "{ \"a\": null, \"b\" : true }", + vec![ + (ObjectStart, vec![]), + (NullValue, vec![Key("a")]), + (BooleanValue(true), vec![Key("b")]), + (ObjectEnd, vec![]), + ] + ); + assert_stream_equal( + "{\"a\" : 1.0 ,\"b\": [ true ]}", + vec![ + (ObjectStart, vec![]), + (F64Value(1.0), vec![Key("a")]), + (ArrayStart, vec![Key("b")]), + (BooleanValue(true),vec![Key("b"), Index(0)]), + (ArrayEnd, vec![Key("b")]), + (ObjectEnd, vec![]), + ] + ); + assert_stream_equal( + r#"{ + "a": 1.0, + "b": [ + true, + "foo\nbar", + { "c": {"d": null} }, + "\uD834\uDF06" + ] + }"#, + vec![ + (ObjectStart, vec![]), + (F64Value(1.0), vec![Key("a")]), + (ArrayStart, vec![Key("b")]), + (BooleanValue(true), vec![Key("b"), Index(0)]), + (StringValue("foo\nbar".to_string()), vec![Key("b"), Index(1)]), + (ObjectStart, vec![Key("b"), Index(2)]), + (ObjectStart, vec![Key("b"), Index(2), Key("c")]), + (NullValue, vec![Key("b"), Index(2), Key("c"), Key("d")]), + (ObjectEnd, vec![Key("b"), Index(2), Key("c")]), + (ObjectEnd, vec![Key("b"), Index(2)]), + (StringValue("\u{1D306}".to_string()), vec![Key("b"), Index(3)]), + (ArrayEnd, vec![Key("b")]), + (ObjectEnd, vec![]), + ] + ); + } + #[test] + #[cfg_attr(target_word_size = "32", ignore)] // FIXME(#14064) + fn test_read_array_streaming() { + assert_stream_equal( + "[]", + vec![ + (ArrayStart, vec![]), + (ArrayEnd, vec![]), + ] + ); + assert_stream_equal( + "[ ]", + vec![ + (ArrayStart, vec![]), + (ArrayEnd, vec![]), + ] + ); + assert_stream_equal( + "[true]", + vec![ + (ArrayStart, vec![]), + (BooleanValue(true), vec![Index(0)]), + (ArrayEnd, 
vec![]), + ] + ); + assert_stream_equal( + "[ false ]", + vec![ + (ArrayStart, vec![]), + (BooleanValue(false), vec![Index(0)]), + (ArrayEnd, vec![]), + ] + ); + assert_stream_equal( + "[null]", + vec![ + (ArrayStart, vec![]), + (NullValue, vec![Index(0)]), + (ArrayEnd, vec![]), + ] + ); + assert_stream_equal( + "[3, 1]", + vec![ + (ArrayStart, vec![]), + (U64Value(3), vec![Index(0)]), + (U64Value(1), vec![Index(1)]), + (ArrayEnd, vec![]), + ] + ); + assert_stream_equal( + "\n[3, 2]\n", + vec![ + (ArrayStart, vec![]), + (U64Value(3), vec![Index(0)]), + (U64Value(2), vec![Index(1)]), + (ArrayEnd, vec![]), + ] + ); + assert_stream_equal( + "[2, [4, 1]]", + vec![ + (ArrayStart, vec![]), + (U64Value(2), vec![Index(0)]), + (ArrayStart, vec![Index(1)]), + (U64Value(4), vec![Index(1), Index(0)]), + (U64Value(1), vec![Index(1), Index(1)]), + (ArrayEnd, vec![Index(1)]), + (ArrayEnd, vec![]), + ] + ); + + assert_eq!(last_event("["), Error(SyntaxError(EOFWhileParsingValue, 1, 2))); + + assert_eq!(Json::from_str("["), Err(SyntaxError(EOFWhileParsingValue, 1, 2))); + assert_eq!(Json::from_str("[1"), Err(SyntaxError(EOFWhileParsingArray, 1, 3))); + assert_eq!(Json::from_str("[1,"), Err(SyntaxError(EOFWhileParsingValue, 1, 4))); + assert_eq!(Json::from_str("[1,]"), Err(SyntaxError(InvalidSyntax, 1, 4))); + assert_eq!(Json::from_str("[6 7]"), Err(SyntaxError(InvalidSyntax, 1, 4))); + + } + #[test] + fn test_trailing_characters_streaming() { + assert_eq!(last_event("nulla"), Error(SyntaxError(TrailingCharacters, 1, 5))); + assert_eq!(last_event("truea"), Error(SyntaxError(TrailingCharacters, 1, 5))); + assert_eq!(last_event("falsea"), Error(SyntaxError(TrailingCharacters, 1, 6))); + assert_eq!(last_event("1a"), Error(SyntaxError(TrailingCharacters, 1, 2))); + assert_eq!(last_event("[]a"), Error(SyntaxError(TrailingCharacters, 1, 3))); + assert_eq!(last_event("{}a"), Error(SyntaxError(TrailingCharacters, 1, 3))); + } + #[test] + fn test_read_identifiers_streaming() { + assert_eq!(Parser::new("null".chars()).next(), Some(NullValue)); + assert_eq!(Parser::new("true".chars()).next(), Some(BooleanValue(true))); + assert_eq!(Parser::new("false".chars()).next(), Some(BooleanValue(false))); + + assert_eq!(last_event("n"), Error(SyntaxError(InvalidSyntax, 1, 2))); + assert_eq!(last_event("nul"), Error(SyntaxError(InvalidSyntax, 1, 4))); + assert_eq!(last_event("t"), Error(SyntaxError(InvalidSyntax, 1, 2))); + assert_eq!(last_event("truz"), Error(SyntaxError(InvalidSyntax, 1, 4))); + assert_eq!(last_event("f"), Error(SyntaxError(InvalidSyntax, 1, 2))); + assert_eq!(last_event("faz"), Error(SyntaxError(InvalidSyntax, 1, 3))); + } + + #[test] + fn test_stack() { + let mut stack = Stack::new(); + + assert!(stack.is_empty()); + assert!(stack.len() == 0); + assert!(!stack.last_is_index()); + + stack.push_index(0); + stack.bump_index(); + + assert!(stack.len() == 1); + assert!(stack.is_equal_to(&[Index(1)])); + assert!(stack.starts_with(&[Index(1)])); + assert!(stack.ends_with(&[Index(1)])); + assert!(stack.last_is_index()); + assert!(stack.get(0) == Index(1)); + + stack.push_key("foo".to_string()); + + assert!(stack.len() == 2); + assert!(stack.is_equal_to(&[Index(1), Key("foo")])); + assert!(stack.starts_with(&[Index(1), Key("foo")])); + assert!(stack.starts_with(&[Index(1)])); + assert!(stack.ends_with(&[Index(1), Key("foo")])); + assert!(stack.ends_with(&[Key("foo")])); + assert!(!stack.last_is_index()); + assert!(stack.get(0) == Index(1)); + assert!(stack.get(1) == Key("foo")); + + stack.push_key("bar".to_string()); 
+ + assert!(stack.len() == 3); + assert!(stack.is_equal_to(&[Index(1), Key("foo"), Key("bar")])); + assert!(stack.starts_with(&[Index(1)])); + assert!(stack.starts_with(&[Index(1), Key("foo")])); + assert!(stack.starts_with(&[Index(1), Key("foo"), Key("bar")])); + assert!(stack.ends_with(&[Key("bar")])); + assert!(stack.ends_with(&[Key("foo"), Key("bar")])); + assert!(stack.ends_with(&[Index(1), Key("foo"), Key("bar")])); + assert!(!stack.last_is_index()); + assert!(stack.get(0) == Index(1)); + assert!(stack.get(1) == Key("foo")); + assert!(stack.get(2) == Key("bar")); + + stack.pop(); + + assert!(stack.len() == 2); + assert!(stack.is_equal_to(&[Index(1), Key("foo")])); + assert!(stack.starts_with(&[Index(1), Key("foo")])); + assert!(stack.starts_with(&[Index(1)])); + assert!(stack.ends_with(&[Index(1), Key("foo")])); + assert!(stack.ends_with(&[Key("foo")])); + assert!(!stack.last_is_index()); + assert!(stack.get(0) == Index(1)); + assert!(stack.get(1) == Key("foo")); + } + + #[test] + fn test_to_json() { + use std::collections::{HashMap,BTreeMap}; + use super::ToJson; + + let array2 = Array(vec!(I64(1), I64(2))); + let array3 = Array(vec!(I64(1), I64(2), I64(3))); + let object = { + let mut tree_map = BTreeMap::new(); + tree_map.insert("a".to_string(), U64(1)); + tree_map.insert("b".to_string(), U64(2)); + Object(tree_map) + }; + + assert_eq!(array2.to_json(), array2); + assert_eq!(object.to_json(), object); + assert_eq!(3_isize.to_json(), I64(3)); + assert_eq!(4_i8.to_json(), I64(4)); + assert_eq!(5_i16.to_json(), I64(5)); + assert_eq!(6_i32.to_json(), I64(6)); + assert_eq!(7_i64.to_json(), I64(7)); + assert_eq!(8_usize.to_json(), U64(8)); + assert_eq!(9_u8.to_json(), U64(9)); + assert_eq!(10_u16.to_json(), U64(10)); + assert_eq!(11_u32.to_json(), U64(11)); + assert_eq!(12_u64.to_json(), U64(12)); + assert_eq!(13.0_f32.to_json(), F64(13.0_f64)); + assert_eq!(14.0_f64.to_json(), F64(14.0_f64)); + assert_eq!(().to_json(), Null); + assert_eq!(f32::INFINITY.to_json(), Null); + assert_eq!(f64::NAN.to_json(), Null); + assert_eq!(true.to_json(), Boolean(true)); + assert_eq!(false.to_json(), Boolean(false)); + assert_eq!("abc".to_json(), String("abc".to_string())); + assert_eq!("abc".to_string().to_json(), String("abc".to_string())); + assert_eq!((1, 2).to_json(), array2); + assert_eq!((1, 2, 3).to_json(), array3); + assert_eq!([1, 2].to_json(), array2); + assert_eq!((&[1, 2, 3]).to_json(), array3); + assert_eq!((vec![1, 2]).to_json(), array2); + assert_eq!(vec!(1, 2, 3).to_json(), array3); + let mut tree_map = BTreeMap::new(); + tree_map.insert("a".to_string(), 1 as u32); + tree_map.insert("b".to_string(), 2); + assert_eq!(tree_map.to_json(), object); + let mut hash_map = HashMap::new(); + hash_map.insert("a".to_string(), 1 as u32); + hash_map.insert("b".to_string(), 2); + assert_eq!(hash_map.to_json(), object); + assert_eq!(Some(15).to_json(), I64(15)); + assert_eq!(Some(15 as u32).to_json(), U64(15)); + assert_eq!(None::.to_json(), Null); + } + + #[test] + fn test_encode_hashmap_with_arbitrary_key() { + use std::collections::HashMap; + #[derive(PartialEq, Eq, Hash, RustcEncodable)] + struct ArbitraryType(u32); + let mut hm: HashMap = HashMap::new(); + hm.insert(ArbitraryType(1), true); + let mut mem_buf = string::String::new(); + let mut encoder = Encoder::new(&mut mem_buf); + let result = hm.encode(&mut encoder); + match result.err().unwrap() { + EncoderError::BadHashmapKey => (), + _ => panic!("expected bad hash map key") + } + } + + #[test] + fn test_bad_json_stack_depleted() { + use 
json; + #[derive(Debug, RustcDecodable)] + enum ChatEvent { + Variant(i32) + } + let serialized = "{\"variant\": \"Variant\", \"fields\": []}"; + let r: Result = json::decode(serialized); + assert!(r.unwrap_err() == EOF); + } + + #[test] + fn fixed_length_array() { + #[derive(Debug, RustcDecodable, RustcEncodable, Eq, PartialEq)] + struct Foo { + a: [u8; 1], + b: [i32; 2], + c: [u64; 3], + } + let f = Foo { + a: [0], + b: [1, 2], + c: [3, 4, 5], + }; + let s = super::encode(&f).unwrap(); + let d = super::decode(&s).unwrap(); + assert_eq!(f, d); + } +} diff --git a/src/lib.rs b/src/lib.rs new file mode 100644 index 0000000000000..b1ebbd94bb8eb --- /dev/null +++ b/src/lib.rs @@ -0,0 +1,55 @@ +// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Support code for encoding and decoding types. +//! +//! # Usage +//! +//! This crate is [on crates.io](https://crates.io/crates/rustc-serialize) and +//! can be used by adding `rustc-serialize` to the dependencies in your +//! project's `Cargo.toml`. +//! +//! ```toml +//! [dependencies] +//! rustc-serialize = "0.3" +//! ``` +//! +//! and this to your crate root: +//! +//! ```rust +//! extern crate rustc_serialize; +//! ``` + +#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", + html_favicon_url = "http://www.rust-lang.org/favicon.ico", + html_root_url = "http://doc.rust-lang.org/rustc-serialize/")] +#![cfg_attr(test, deny(warnings))] +#![allow(trivial_numeric_casts)] +#![cfg_attr(rust_build, feature(staged_api))] +#![cfg_attr(rust_build, staged_api)] +#![cfg_attr(rust_build, + unstable(feature = "rustc_private", + reason = "use the crates.io `rustc-serialize` library instead"))] + +#[cfg(test)] extern crate rand; + +pub use self::serialize::{Decoder, Encoder, Decodable, Encodable, + DecoderHelpers, EncoderHelpers}; + +mod serialize; +mod collection_impls; + +pub mod base64; +pub mod hex; +pub mod json; + +mod rustc_serialize { + pub use serialize::*; +} diff --git a/src/serialize.rs b/src/serialize.rs new file mode 100644 index 0000000000000..52507ca8ccc9b --- /dev/null +++ b/src/serialize.rs @@ -0,0 +1,725 @@ +// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Support code for encoding and decoding types. + +/* +Core encoding and decoding interfaces. 
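The `fixed_length_array` test above round-trips a derived struct through `encode` and `decode`. As a rough illustration of what that looks like when written by hand against the closure-based `Encoder`/`Decoder` traits defined below (a sketch, not part of the patch; `Point` is a made-up type):

```rust
extern crate rustc_serialize;

use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};

struct Point {
    x: i32,
    y: i32,
}

impl Encodable for Point {
    fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
        // Structs map onto `emit_struct` plus one `emit_struct_field`
        // call per field, each taking a closure over the encoder.
        s.emit_struct("Point", 2, |s| {
            try!(s.emit_struct_field("x", 0, |s| self.x.encode(s)));
            s.emit_struct_field("y", 1, |s| self.y.encode(s))
        })
    }
}

impl Decodable for Point {
    fn decode<D: Decoder>(d: &mut D) -> Result<Point, D::Error> {
        d.read_struct("Point", 2, |d| {
            Ok(Point {
                x: try!(d.read_struct_field("x", 0, |d| Decodable::decode(d))),
                y: try!(d.read_struct_field("y", 1, |d| Decodable::decode(d))),
            })
        })
    }
}

fn main() {
    // Round-trips through the `json` module of this crate.
    let p = Point { x: 3, y: 4 };
    let s = rustc_serialize::json::encode(&p).unwrap();
    let q: Point = rustc_serialize::json::decode(&s).unwrap();
    assert_eq!((q.x, q.y), (3, 4));
}
```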
+*/ + +use std::cell::{Cell, RefCell}; +use std::ffi::OsString; +use std::path; +use std::rc::Rc; +use std::sync::Arc; +use std::marker::PhantomData; +use std::borrow::Cow; + +pub trait Encoder { + type Error; + + // Primitive types: + fn emit_nil(&mut self) -> Result<(), Self::Error>; + fn emit_usize(&mut self, v: usize) -> Result<(), Self::Error>; + fn emit_u64(&mut self, v: u64) -> Result<(), Self::Error>; + fn emit_u32(&mut self, v: u32) -> Result<(), Self::Error>; + fn emit_u16(&mut self, v: u16) -> Result<(), Self::Error>; + fn emit_u8(&mut self, v: u8) -> Result<(), Self::Error>; + fn emit_isize(&mut self, v: isize) -> Result<(), Self::Error>; + fn emit_i64(&mut self, v: i64) -> Result<(), Self::Error>; + fn emit_i32(&mut self, v: i32) -> Result<(), Self::Error>; + fn emit_i16(&mut self, v: i16) -> Result<(), Self::Error>; + fn emit_i8(&mut self, v: i8) -> Result<(), Self::Error>; + fn emit_bool(&mut self, v: bool) -> Result<(), Self::Error>; + fn emit_f64(&mut self, v: f64) -> Result<(), Self::Error>; + fn emit_f32(&mut self, v: f32) -> Result<(), Self::Error>; + fn emit_char(&mut self, v: char) -> Result<(), Self::Error>; + fn emit_str(&mut self, v: &str) -> Result<(), Self::Error>; + + // Compound types: + fn emit_enum(&mut self, name: &str, f: F) -> Result<(), Self::Error> + where F: FnOnce(&mut Self) -> Result<(), Self::Error>; + + fn emit_enum_variant(&mut self, v_name: &str, + v_id: usize, + len: usize, + f: F) -> Result<(), Self::Error> + where F: FnOnce(&mut Self) -> Result<(), Self::Error>; + fn emit_enum_variant_arg(&mut self, a_idx: usize, f: F) + -> Result<(), Self::Error> + where F: FnOnce(&mut Self) -> Result<(), Self::Error>; + + fn emit_enum_struct_variant(&mut self, v_name: &str, + v_id: usize, + len: usize, + f: F) -> Result<(), Self::Error> + where F: FnOnce(&mut Self) -> Result<(), Self::Error>; + fn emit_enum_struct_variant_field(&mut self, + f_name: &str, + f_idx: usize, + f: F) -> Result<(), Self::Error> + where F: FnOnce(&mut Self) -> Result<(), Self::Error>; + + fn emit_struct(&mut self, name: &str, len: usize, f: F) + -> Result<(), Self::Error> + where F: FnOnce(&mut Self) -> Result<(), Self::Error>; + fn emit_struct_field(&mut self, f_name: &str, f_idx: usize, f: F) + -> Result<(), Self::Error> + where F: FnOnce(&mut Self) -> Result<(), Self::Error>; + + fn emit_tuple(&mut self, len: usize, f: F) -> Result<(), Self::Error> + where F: FnOnce(&mut Self) -> Result<(), Self::Error>; + fn emit_tuple_arg(&mut self, idx: usize, f: F) -> Result<(), Self::Error> + where F: FnOnce(&mut Self) -> Result<(), Self::Error>; + + fn emit_tuple_struct(&mut self, name: &str, len: usize, f: F) + -> Result<(), Self::Error> + where F: FnOnce(&mut Self) -> Result<(), Self::Error>; + fn emit_tuple_struct_arg(&mut self, f_idx: usize, f: F) + -> Result<(), Self::Error> + where F: FnOnce(&mut Self) -> Result<(), Self::Error>; + + // Specialized types: + fn emit_option(&mut self, f: F) -> Result<(), Self::Error> + where F: FnOnce(&mut Self) -> Result<(), Self::Error>; + fn emit_option_none(&mut self) -> Result<(), Self::Error>; + fn emit_option_some(&mut self, f: F) -> Result<(), Self::Error> + where F: FnOnce(&mut Self) -> Result<(), Self::Error>; + + fn emit_seq(&mut self, len: usize, f: F) -> Result<(), Self::Error> + where F: FnOnce(&mut Self) -> Result<(), Self::Error>; + fn emit_seq_elt(&mut self, idx: usize, f: F) -> Result<(), Self::Error> + where F: FnOnce(&mut Self) -> Result<(), Self::Error>; + + fn emit_map(&mut self, len: usize, f: F) -> Result<(), Self::Error> + where 
F: FnOnce(&mut Self) -> Result<(), Self::Error>; + fn emit_map_elt_key(&mut self, idx: usize, f: F) -> Result<(), Self::Error> + where F: FnOnce(&mut Self) -> Result<(), Self::Error>; + fn emit_map_elt_val(&mut self, idx: usize, f: F) -> Result<(), Self::Error> + where F: FnOnce(&mut Self) -> Result<(), Self::Error>; +} + +pub trait Decoder { + type Error; + + // Primitive types: + fn read_nil(&mut self) -> Result<(), Self::Error>; + fn read_usize(&mut self) -> Result; + fn read_u64(&mut self) -> Result; + fn read_u32(&mut self) -> Result; + fn read_u16(&mut self) -> Result; + fn read_u8(&mut self) -> Result; + fn read_isize(&mut self) -> Result; + fn read_i64(&mut self) -> Result; + fn read_i32(&mut self) -> Result; + fn read_i16(&mut self) -> Result; + fn read_i8(&mut self) -> Result; + fn read_bool(&mut self) -> Result; + fn read_f64(&mut self) -> Result; + fn read_f32(&mut self) -> Result; + fn read_char(&mut self) -> Result; + fn read_str(&mut self) -> Result; + + // Compound types: + fn read_enum(&mut self, name: &str, f: F) -> Result + where F: FnOnce(&mut Self) -> Result; + + fn read_enum_variant(&mut self, names: &[&str], f: F) + -> Result + where F: FnMut(&mut Self, usize) -> Result; + fn read_enum_variant_arg(&mut self, a_idx: usize, f: F) + -> Result + where F: FnOnce(&mut Self) -> Result; + + fn read_enum_struct_variant(&mut self, names: &[&str], f: F) + -> Result + where F: FnMut(&mut Self, usize) -> Result; + fn read_enum_struct_variant_field(&mut self, + &f_name: &str, + f_idx: usize, + f: F) + -> Result + where F: FnOnce(&mut Self) -> Result; + + fn read_struct(&mut self, s_name: &str, len: usize, f: F) + -> Result + where F: FnOnce(&mut Self) -> Result; + fn read_struct_field(&mut self, + f_name: &str, + f_idx: usize, + f: F) + -> Result + where F: FnOnce(&mut Self) -> Result; + + fn read_tuple(&mut self, len: usize, f: F) -> Result + where F: FnOnce(&mut Self) -> Result; + fn read_tuple_arg(&mut self, a_idx: usize, f: F) + -> Result + where F: FnOnce(&mut Self) -> Result; + + fn read_tuple_struct(&mut self, s_name: &str, len: usize, f: F) + -> Result + where F: FnOnce(&mut Self) -> Result; + fn read_tuple_struct_arg(&mut self, a_idx: usize, f: F) + -> Result + where F: FnOnce(&mut Self) -> Result; + + // Specialized types: + fn read_option(&mut self, f: F) -> Result + where F: FnMut(&mut Self, bool) -> Result; + + fn read_seq(&mut self, f: F) -> Result + where F: FnOnce(&mut Self, usize) -> Result; + fn read_seq_elt(&mut self, idx: usize, f: F) -> Result + where F: FnOnce(&mut Self) -> Result; + + fn read_map(&mut self, f: F) -> Result + where F: FnOnce(&mut Self, usize) -> Result; + fn read_map_elt_key(&mut self, idx: usize, f: F) + -> Result + where F: FnOnce(&mut Self) -> Result; + fn read_map_elt_val(&mut self, idx: usize, f: F) + -> Result + where F: FnOnce(&mut Self) -> Result; + + // Failure + fn error(&mut self, err: &str) -> Self::Error; +} + +pub trait Encodable { + fn encode(&self, s: &mut S) -> Result<(), S::Error>; +} + +pub trait Decodable { + fn decode(d: &mut D) -> Result; +} + +impl Encodable for usize { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_usize(*self) + } +} + +impl Decodable for usize { + fn decode(d: &mut D) -> Result { + d.read_usize() + } +} + +impl Encodable for u8 { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_u8(*self) + } +} + +impl Decodable for u8 { + fn decode(d: &mut D) -> Result { + d.read_u8() + } +} + +impl Encodable for u16 { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + 
s.emit_u16(*self) + } +} + +impl Decodable for u16 { + fn decode(d: &mut D) -> Result { + d.read_u16() + } +} + +impl Encodable for u32 { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_u32(*self) + } +} + +impl Decodable for u32 { + fn decode(d: &mut D) -> Result { + d.read_u32() + } +} + +impl Encodable for u64 { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_u64(*self) + } +} + +impl Decodable for u64 { + fn decode(d: &mut D) -> Result { + d.read_u64() + } +} + +impl Encodable for isize { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_isize(*self) + } +} + +impl Decodable for isize { + fn decode(d: &mut D) -> Result { + d.read_isize() + } +} + +impl Encodable for i8 { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_i8(*self) + } +} + +impl Decodable for i8 { + fn decode(d: &mut D) -> Result { + d.read_i8() + } +} + +impl Encodable for i16 { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_i16(*self) + } +} + +impl Decodable for i16 { + fn decode(d: &mut D) -> Result { + d.read_i16() + } +} + +impl Encodable for i32 { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_i32(*self) + } +} + +impl Decodable for i32 { + fn decode(d: &mut D) -> Result { + d.read_i32() + } +} + +impl Encodable for i64 { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_i64(*self) + } +} + +impl Decodable for i64 { + fn decode(d: &mut D) -> Result { + d.read_i64() + } +} + +impl Encodable for str { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_str(self) + } +} + +impl Encodable for String { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_str(self) + } +} + +impl Decodable for String { + fn decode(d: &mut D) -> Result { + d.read_str() + } +} + +impl Encodable for f32 { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_f32(*self) + } +} + +impl Decodable for f32 { + fn decode(d: &mut D) -> Result { + d.read_f32() + } +} + +impl Encodable for f64 { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_f64(*self) + } +} + +impl Decodable for f64 { + fn decode(d: &mut D) -> Result { + d.read_f64() + } +} + +impl Encodable for bool { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_bool(*self) + } +} + +impl Decodable for bool { + fn decode(d: &mut D) -> Result { + d.read_bool() + } +} + +impl Encodable for char { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_char(*self) + } +} + +impl Decodable for char { + fn decode(d: &mut D) -> Result { + d.read_char() + } +} + +impl Encodable for () { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_nil() + } +} + +impl Decodable for () { + fn decode(d: &mut D) -> Result<(), D::Error> { + d.read_nil() + } +} + +impl<'a, T: ?Sized + Encodable> Encodable for &'a T { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + (**self).encode(s) + } +} + +impl Encodable for Box { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + (**self).encode(s) + } +} + +impl< T: Decodable> Decodable for Box { + fn decode(d: &mut D) -> Result, D::Error> { + Ok(Box::new(try!(Decodable::decode(d)))) + } +} + +impl Encodable for Rc { + #[inline] + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + (**self).encode(s) + } +} + +impl Decodable for Rc { + #[inline] + fn decode(d: &mut D) -> Result, D::Error> { + Ok(Rc::new(try!(Decodable::decode(d)))) + } +} + +impl<'a, T:Encodable + ToOwned + ?Sized> Encodable for Cow<'a, T> { + #[inline] + fn encode(&self, s: &mut S) 
-> Result<(), S::Error> { + (**self).encode(s) + } +} + +impl<'a, T: ?Sized> Decodable for Cow<'a, T> + where T: ToOwned, T::Owned: Decodable +{ + #[inline] + fn decode(d: &mut D) -> Result, D::Error> { + Ok(Cow::Owned(try!(Decodable::decode(d)))) + } +} + +impl Encodable for [T] { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_seq(self.len(), |s| { + for (i, e) in self.iter().enumerate() { + try!(s.emit_seq_elt(i, |s| e.encode(s))) + } + Ok(()) + }) + } +} + +impl Encodable for Vec { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_seq(self.len(), |s| { + for (i, e) in self.iter().enumerate() { + try!(s.emit_seq_elt(i, |s| e.encode(s))) + } + Ok(()) + }) + } +} + +impl Decodable for Vec { + fn decode(d: &mut D) -> Result, D::Error> { + d.read_seq(|d, len| { + let mut v = Vec::with_capacity(len); + for i in 0..len { + v.push(try!(d.read_seq_elt(i, |d| Decodable::decode(d)))); + } + Ok(v) + }) + } +} + +impl Encodable for Option { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_option(|s| { + match *self { + None => s.emit_option_none(), + Some(ref v) => s.emit_option_some(|s| v.encode(s)), + } + }) + } +} + +impl Decodable for Option { + fn decode(d: &mut D) -> Result, D::Error> { + d.read_option(|d, b| { + if b { + Ok(Some(try!(Decodable::decode(d)))) + } else { + Ok(None) + } + }) + } +} + +impl Encodable for PhantomData { + fn encode(&self, _s: &mut S) -> Result<(), S::Error> { + Ok(()) + } +} + +impl Decodable for PhantomData { + fn decode(_d: &mut D) -> Result, D::Error> { + Ok(PhantomData) + } +} + +macro_rules! peel { + ($name:ident, $($other:ident,)*) => (tuple! { $($other,)* }) +} + +/// Evaluates to the number of identifiers passed to it, for example: +/// `count_idents!(a, b, c) == 3 +macro_rules! count_idents { + () => { 0 }; + ($_i:ident, $($rest:ident,)*) => { 1 + count_idents!($($rest,)*) } +} + +macro_rules! tuple { + () => (); + ( $($name:ident,)+ ) => ( + impl<$($name:Decodable),*> Decodable for ($($name,)*) { + fn decode(d: &mut D) -> Result<($($name,)*), D::Error> { + let len: usize = count_idents!($($name,)*); + d.read_tuple(len, |d| { + let mut i = 0; + let ret = ($(try!(d.read_tuple_arg({ i+=1; i-1 }, + |d| -> Result<$name,D::Error> { + Decodable::decode(d) + })),)*); + return Ok(ret); + }) + } + } + impl<$($name:Encodable),*> Encodable for ($($name,)*) { + #[allow(non_snake_case)] + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + let ($(ref $name,)*) = *self; + let mut n = 0; + $(let $name = $name; n += 1;)* + s.emit_tuple(n, |s| { + let mut i = 0; + $(try!(s.emit_tuple_arg({ i+=1; i-1 }, |s| $name.encode(s)));)* + Ok(()) + }) + } + } + peel! { $($name,)* } + ) +} + +tuple! { T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, } + +macro_rules! array { + ($zero:expr) => (); + ($len:expr, $($idx:expr),*) => { + impl Decodable for [T; $len] { + fn decode(d: &mut D) -> Result<[T; $len], D::Error> { + d.read_seq(|d, len| { + if len != $len { + return Err(d.error("wrong array length")); + } + Ok([$( + try!(d.read_seq_elt($len - $idx - 1, + |d| Decodable::decode(d))) + ),+]) + }) + } + } + + impl Encodable for [T; $len] { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_seq($len, |s| { + for i in 0..$len { + try!(s.emit_seq_elt(i, |s| self[i].encode(s))); + } + Ok(()) + }) + } + } + array! { $($idx),* } + } +} + +array! 
{ + 32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17, 16, + 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0 +} + +impl Encodable for path::Path { + #[cfg(unix)] + fn encode(&self, e: &mut S) -> Result<(), S::Error> { + use std::os::unix::prelude::*; + self.as_os_str().as_bytes().encode(e) + } + #[cfg(windows)] + fn encode(&self, e: &mut S) -> Result<(), S::Error> { + use std::os::windows::prelude::*; + let v = self.as_os_str().encode_wide().collect::>(); + v.encode(e) + } +} + +impl Encodable for path::PathBuf { + fn encode(&self, e: &mut S) -> Result<(), S::Error> { + (**self).encode(e) + } +} + +impl Decodable for path::PathBuf { + #[cfg(unix)] + fn decode(d: &mut D) -> Result { + use std::os::unix::prelude::*; + let bytes: Vec = try!(Decodable::decode(d)); + let s: OsString = OsStringExt::from_vec(bytes); + let mut p = path::PathBuf::new(); + p.push(s); + Ok(p) + } + #[cfg(windows)] + fn decode(d: &mut D) -> Result { + use std::os::windows::prelude::*; + let bytes: Vec = try!(Decodable::decode(d)); + let s: OsString = OsStringExt::from_wide(&bytes); + let mut p = path::PathBuf::new(); + p.push(s); + Ok(p) + } +} + +impl Encodable for Cell { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + self.get().encode(s) + } +} + +impl Decodable for Cell { + fn decode(d: &mut D) -> Result, D::Error> { + Ok(Cell::new(try!(Decodable::decode(d)))) + } +} + +// FIXME: #15036 +// Should use `try_borrow`, returning a +// `encoder.error("attempting to Encode borrowed RefCell")` +// from `encode` when `try_borrow` returns `None`. + +impl Encodable for RefCell { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + self.borrow().encode(s) + } +} + +impl Decodable for RefCell { + fn decode(d: &mut D) -> Result, D::Error> { + Ok(RefCell::new(try!(Decodable::decode(d)))) + } +} + +impl Encodable for Arc { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + (**self).encode(s) + } +} + +impl Decodable for Arc { + fn decode(d: &mut D) -> Result, D::Error> { + Ok(Arc::new(try!(Decodable::decode(d)))) + } +} + +// ___________________________________________________________________________ +// Helper routines + +pub trait EncoderHelpers: Encoder { + fn emit_from_vec(&mut self, v: &[T], f: F) + -> Result<(), ::Error> + where F: FnMut(&mut Self, &T) -> Result<(), ::Error>; +} + +impl EncoderHelpers for S { + fn emit_from_vec(&mut self, v: &[T], mut f: F) -> Result<(), S::Error> where + F: FnMut(&mut S, &T) -> Result<(), S::Error>, + { + self.emit_seq(v.len(), |this| { + for (i, e) in v.iter().enumerate() { + try!(this.emit_seq_elt(i, |this| { + f(this, e) + })); + } + Ok(()) + }) + } +} + +pub trait DecoderHelpers: Decoder { + fn read_to_vec(&mut self, f: F) + -> Result, ::Error> where + F: FnMut(&mut Self) -> Result::Error>; +} + +impl DecoderHelpers for D { + fn read_to_vec(&mut self, mut f: F) -> Result, D::Error> where F: + FnMut(&mut D) -> Result, + { + self.read_seq(|this, len| { + let mut v = Vec::with_capacity(len); + for i in 0..len { + v.push(try!(this.read_seq_elt(i, |this| f(this)))); + } + Ok(v) + }) + } +} From be7e9c79eff361ab786b8f0d2546b053c5873426 Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Fri, 5 Jun 2015 10:55:01 -0700 Subject: [PATCH 4/6] Squashed 'src/external/getopts/' content from commit 478ce9b git-subtree-dir: src/external/getopts git-subtree-split: 478ce9bdb98096afa066e79c64efb6683a8fa112 --- .gitignore | 2 + .travis.yml | 24 + Cargo.toml | 16 + LICENSE-APACHE | 201 ++++++ LICENSE-MIT | 25 + README.md | 23 + src/lib.rs | 1715 
++++++++++++++++++++++++++++++++++++++++++++++++ tests/smoke.rs | 8 + 8 files changed, 2014 insertions(+) create mode 100644 .gitignore create mode 100644 .travis.yml create mode 100644 Cargo.toml create mode 100644 LICENSE-APACHE create mode 100644 LICENSE-MIT create mode 100644 README.md create mode 100644 src/lib.rs create mode 100644 tests/smoke.rs diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000000..4fffb2f89cbd8 --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +/target +/Cargo.lock diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000000000..01859cab7657e --- /dev/null +++ b/.travis.yml @@ -0,0 +1,24 @@ +language: rust +rust: + - 1.0.0 + - beta + - nightly +script: + - cargo build --verbose + - cargo test --verbose + - cargo doc +after_success: | + [ $TRAVIS_BRANCH = master ] && + [ $TRAVIS_PULL_REQUEST = false ] && + [ $TRAVIS_RUST_VERSION = nightly ] && + echo '' > target/doc/index.html && + sudo pip install ghp-import && + ghp-import -n target/doc && + git push -fq https://${TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git gh-pages + +env: + global: + secure: H6SBXFQ/i/xRTCkEgKwk9WTSKXXaaEMqrASLbzKqhqJHR3ed/vp5YBWzlCJkAI3+2j3iVmbCfHqtpzmF9f1GAn+apg3mDI5GfFRJAfncHhZoN332wcBLvvNuRvJS9wRk3j3kOLKjBJmPa7+TCFafyXfCAutzfUlQPK8dgxG8WEI= +notifications: + email: + on_success: never diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000000000..f792419dc59c6 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,16 @@ +[package] + +name = "getopts" +version = "0.2.11" +authors = ["The Rust Project Developers"] +license = "MIT/Apache-2.0" +readme = "README.md" +repository = "https://github.com/rust-lang/getopts" +documentation = "http://doc.rust-lang.org/getopts" +homepage = "https://github.com/rust-lang/getopts" +description = """ +getopts-like option parsing. +""" + +[dependencies] +log = "0.3" diff --git a/LICENSE-APACHE b/LICENSE-APACHE new file mode 100644 index 0000000000000..16fe87b06e802 --- /dev/null +++ b/LICENSE-APACHE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + +Copyright [yyyy] [name of copyright owner] + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/LICENSE-MIT b/LICENSE-MIT new file mode 100644 index 0000000000000..39d4bdb5acd31 --- /dev/null +++ b/LICENSE-MIT @@ -0,0 +1,25 @@ +Copyright (c) 2014 The Rust Project Developers + +Permission is hereby granted, free of charge, to any +person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the +Software without restriction, including without +limitation the rights to use, copy, modify, merge, +publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software +is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions +of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF +ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED +TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A +PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT +SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR +IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 0000000000000..c19f48fb06b5c --- /dev/null +++ b/README.md @@ -0,0 +1,23 @@ +getopts +=== + +A Rust library for option parsing for CLI utilities. + +[![Build Status](https://travis-ci.org/rust-lang/getopts.svg?branch=master)](https://travis-ci.org/rust-lang/getopts) + +[Documentation](http://doc.rust-lang.org/getopts) + +## Usage + +Add this to your `Cargo.toml`: + +```toml +[dependencies] +getopts = "0.2.4" +``` + +and this to your crate root: + +```rust +extern crate getopts; +``` diff --git a/src/lib.rs b/src/lib.rs new file mode 100644 index 0000000000000..1fd68973b08e1 --- /dev/null +++ b/src/lib.rs @@ -0,0 +1,1715 @@ +// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. +// +// ignore-lexer-test FIXME #15677 + +//! Simple getopt alternative. +//! +//! Construct a vector of options, either by using `reqopt`, `optopt`, and +//! `optflag` or by building them from components yourself, and pass them to +//! `getopts`, along with a vector of actual arguments (not including +//! `argv[0]`). You'll either get a failure code back, or a match. You'll have +//! to verify whether the amount of 'free' arguments in the match is what you +//! expect. Use `opt_*` accessors to get argument values out of the matches +//! object. +//! +//! Single-character options are expected to appear on the command line with a +//! single preceding dash; multiple-character options are expected to be +//! proceeded by two dashes. Options that expect an argument accept their +//! argument following either a space or an equals sign. Single-character +//! options don't require the space. +//! +//! # Usage +//! +//! This crate is [on crates.io](https://crates.io/crates/getopts) and can be +//! used by adding `getopts` to the dependencies in your project's `Cargo.toml`. +//! +//! ```toml +//! [dependencies] +//! getopts = "0.2" +//! ``` +//! +//! and this to your crate root: +//! +//! ```rust +//! extern crate getopts; +//! ``` +//! +//! # Example +//! +//! The following example shows simple command line parsing for an application +//! that requires an input file to be specified, accepts an optional output file +//! name following `-o`, and accepts both `-h` and `--help` as optional flags. +//! +//! ```{.rust} +//! extern crate getopts; +//! use getopts::Options; +//! use std::env; +//! +//! fn do_work(inp: &str, out: Option) { +//! println!("{}", inp); +//! match out { +//! Some(x) => println!("{}", x), +//! None => println!("No Output"), +//! } +//! } +//! +//! fn print_usage(program: &str, opts: Options) { +//! let brief = format!("Usage: {} [options]", program); +//! print!("{}", opts.usage(&brief)); +//! } +//! +//! fn main() { +//! let args: Vec = env::args().collect(); +//! let program = args[0].clone(); +//! +//! let mut opts = Options::new(); +//! opts.optopt("o", "", "set output file name", "NAME"); +//! opts.optflag("h", "help", "print this help menu"); +//! 
let matches = match opts.parse(&args[1..]) { +//! Ok(m) => { m } +//! Err(f) => { panic!(f.to_string()) } +//! }; +//! if matches.opt_present("h") { +//! print_usage(&program, opts); +//! return; +//! } +//! let output = matches.opt_str("o"); +//! let input = if !matches.free.is_empty() { +//! matches.free[0].clone() +//! } else { +//! print_usage(&program, opts); +//! return; +//! }; +//! do_work(&input, output); +//! } +//! ``` + +#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", + html_favicon_url = "http://www.rust-lang.org/favicon.ico", + html_root_url = "http://doc.rust-lang.org/getopts/")] +#![deny(missing_docs)] +#![cfg_attr(test, deny(warnings))] +#![cfg_attr(rust_build, feature(staged_api))] +#![cfg_attr(rust_build, staged_api)] +#![cfg_attr(rust_build, + unstable(feature = "rustc_private", + reason = "use the crates.io `getopts` library instead"))] + +#[cfg(test)] #[macro_use] extern crate log; + +use self::Name::*; +use self::HasArg::*; +use self::Occur::*; +use self::Fail::*; +use self::Optval::*; +use self::SplitWithinState::*; +use self::Whitespace::*; +use self::LengthLimit::*; + +use std::ffi::OsStr; +use std::fmt; +use std::iter::{repeat, IntoIterator}; +use std::result; + +/// A description of the options that a program can handle +pub struct Options { + grps: Vec, + parsing_style : ParsingStyle +} + +impl Options { + /// Create a blank set of options + pub fn new() -> Options { + Options { + grps: Vec::new(), + parsing_style: ParsingStyle::FloatingFrees + } + } + + /// Set the parsing style + pub fn parsing_style(&mut self, style: ParsingStyle) -> &mut Options { + self.parsing_style = style; + self + } + + /// Create a generic option group, stating all parameters explicitly + pub fn opt(&mut self, short_name: &str, long_name: &str, desc: &str, + hint: &str, hasarg: HasArg, occur: Occur) -> &mut Options { + let len = short_name.len(); + assert!(len == 1 || len == 0); + self.grps.push(OptGroup { + short_name: short_name.to_string(), + long_name: long_name.to_string(), + hint: hint.to_string(), + desc: desc.to_string(), + hasarg: hasarg, + occur: occur + }); + self + } + + /// Create a long option that is optional and does not take an argument. + /// + /// * `short_name` - e.g. `"h"` for a `-h` option, or `""` for none + /// * `long_name` - e.g. `"help"` for a `--help` option, or `""` for none + /// * `desc` - Description for usage help + pub fn optflag(&mut self, short_name: &str, long_name: &str, desc: &str) + -> &mut Options { + let len = short_name.len(); + assert!(len == 1 || len == 0); + self.grps.push(OptGroup { + short_name: short_name.to_string(), + long_name: long_name.to_string(), + hint: "".to_string(), + desc: desc.to_string(), + hasarg: No, + occur: Optional + }); + self + } + + /// Create a long option that can occur more than once and does not + /// take an argument. + /// + /// * `short_name` - e.g. `"h"` for a `-h` option, or `""` for none + /// * `long_name` - e.g. `"help"` for a `--help` option, or `""` for none + /// * `desc` - Description for usage help + pub fn optflagmulti(&mut self, short_name: &str, long_name: &str, desc: &str) + -> &mut Options { + let len = short_name.len(); + assert!(len == 1 || len == 0); + self.grps.push(OptGroup { + short_name: short_name.to_string(), + long_name: long_name.to_string(), + hint: "".to_string(), + desc: desc.to_string(), + hasarg: No, + occur: Multi + }); + self + } + + /// Create a long option that is optional and takes an optional argument. 
+ /// + /// * `short_name` - e.g. `"h"` for a `-h` option, or `""` for none + /// * `long_name` - e.g. `"help"` for a `--help` option, or `""` for none + /// * `desc` - Description for usage help + /// * `hint` - Hint that is used in place of the argument in the usage help, + /// e.g. `"FILE"` for a `-o FILE` option + pub fn optflagopt(&mut self, short_name: &str, long_name: &str, desc: &str, + hint: &str) -> &mut Options { + let len = short_name.len(); + assert!(len == 1 || len == 0); + self.grps.push(OptGroup { + short_name: short_name.to_string(), + long_name: long_name.to_string(), + hint: hint.to_string(), + desc: desc.to_string(), + hasarg: Maybe, + occur: Optional + }); + self + } + + /// Create a long option that is optional, takes an argument, and may occur + /// multiple times. + /// + /// * `short_name` - e.g. `"h"` for a `-h` option, or `""` for none + /// * `long_name` - e.g. `"help"` for a `--help` option, or `""` for none + /// * `desc` - Description for usage help + /// * `hint` - Hint that is used in place of the argument in the usage help, + /// e.g. `"FILE"` for a `-o FILE` option + pub fn optmulti(&mut self, short_name: &str, long_name: &str, desc: &str, hint: &str) + -> &mut Options { + let len = short_name.len(); + assert!(len == 1 || len == 0); + self.grps.push(OptGroup { + short_name: short_name.to_string(), + long_name: long_name.to_string(), + hint: hint.to_string(), + desc: desc.to_string(), + hasarg: Yes, + occur: Multi + }); + self + } + + /// Create a long option that is optional and takes an argument. + /// + /// * `short_name` - e.g. `"h"` for a `-h` option, or `""` for none + /// * `long_name` - e.g. `"help"` for a `--help` option, or `""` for none + /// * `desc` - Description for usage help + /// * `hint` - Hint that is used in place of the argument in the usage help, + /// e.g. `"FILE"` for a `-o FILE` option + pub fn optopt(&mut self, short_name: &str, long_name: &str, desc: &str, hint: &str) + -> &mut Options { + let len = short_name.len(); + assert!(len == 1 || len == 0); + self.grps.push(OptGroup { + short_name: short_name.to_string(), + long_name: long_name.to_string(), + hint: hint.to_string(), + desc: desc.to_string(), + hasarg: Yes, + occur: Optional + }); + self + } + + /// Create a long option that is required and takes an argument. + /// + /// * `short_name` - e.g. `"h"` for a `-h` option, or `""` for none + /// * `long_name` - e.g. `"help"` for a `--help` option, or `""` for none + /// * `desc` - Description for usage help + /// * `hint` - Hint that is used in place of the argument in the usage help, + /// e.g. `"FILE"` for a `-o FILE` option + pub fn reqopt(&mut self, short_name: &str, long_name: &str, desc: &str, hint: &str) + -> &mut Options { + let len = short_name.len(); + assert!(len == 1 || len == 0); + self.grps.push(OptGroup { + short_name: short_name.to_string(), + long_name: long_name.to_string(), + hint: hint.to_string(), + desc: desc.to_string(), + hasarg: Yes, + occur: Req + }); + self + } + + /// Parse command line arguments according to the provided options. + /// + /// On success returns `Ok(Matches)`. Use methods such as `opt_present` + /// `opt_str`, etc. to interrogate results. + /// # Panics + /// + /// Returns `Err(Fail)` on failure: use the `Debug` implementation of `Fail` + /// to display information about it. 
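Assuming the builder methods above and the `Display` impl for `Fail` defined later in this file, a minimal usage sketch of feeding `env::args()` through `parse` (not part of the patch; the `-i/--input` and `-v/--verbose` options are invented for illustration):

```rust
extern crate getopts;

use getopts::Options;
use std::env;

fn main() {
    let args: Vec<String> = env::args().collect();

    let mut opts = Options::new();
    opts.reqopt("i", "input", "input file (required)", "FILE");
    opts.optflag("v", "verbose", "enable verbose output");

    match opts.parse(&args[1..]) {
        Ok(m) => {
            // `opt_str` returns the argument given to a matched option, if any.
            println!("input   = {:?}", m.opt_str("input"));
            println!("verbose = {}", m.opt_present("v"));
            println!("free    = {:?}", m.free);
        }
        // Every `Fail` variant renders a human-readable message via `Display`.
        Err(f) => println!("error: {}", f),
    }
}
```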
+ pub fn parse(&self, args: C) -> Result + where C::Item: AsRef + { + let opts: Vec = self.grps.iter().map(|x| x.long_to_short()).collect(); + let n_opts = opts.len(); + + fn f(_x: usize) -> Vec { return Vec::new(); } + + let mut vals = (0 .. n_opts).map(f).collect::>(); + let mut free: Vec = Vec::new(); + let args = args.into_iter().map(|i| { + i.as_ref().to_str().unwrap().to_string() + }).collect::>(); + let l = args.len(); + let mut i = 0; + while i < l { + let cur = args[i].clone(); + let curlen = cur.len(); + if !is_arg(&cur) { + match self.parsing_style { + ParsingStyle::FloatingFrees => free.push(cur), + ParsingStyle::StopAtFirstFree => { + while i < l { + free.push(args[i].clone()); + i += 1; + } + break; + } + } + } else if cur == "--" { + let mut j = i + 1; + while j < l { free.push(args[j].clone()); j += 1; } + break; + } else if cur == "-" { + free.push(cur); + } else { + let mut names; + let mut i_arg = None; + if cur.as_bytes()[1] == b'-' { + let tail = &cur[2..curlen]; + let tail_eq: Vec<&str> = tail.split('=').collect(); + if tail_eq.len() <= 1 { + names = vec!(Long(tail.to_string())); + } else { + names = + vec!(Long(tail_eq[0].to_string())); + i_arg = Some(tail_eq[1].to_string()); + } + } else { + names = Vec::new(); + for (j, ch) in cur.char_indices().skip(1) { + let opt = Short(ch); + + /* In a series of potential options (eg. -aheJ), if we + see one which takes an argument, we assume all + subsequent characters make up the argument. This + allows options such as -L/usr/local/lib/foo to be + interpreted correctly + */ + + let opt_id = match find_opt(&opts, opt.clone()) { + Some(id) => id, + None => return Err(UnrecognizedOption(opt.to_string())) + }; + + names.push(opt); + + let arg_follows = match opts[opt_id].hasarg { + Yes | Maybe => true, + No => false + }; + + if arg_follows { + let next = j + ch.len_utf8(); + if next < cur.len() { + i_arg = Some(cur[next..curlen].to_string()); + break; + } + } + } + } + let mut name_pos = 0; + for nm in names.iter() { + name_pos += 1; + let optid = match find_opt(&opts, (*nm).clone()) { + Some(id) => id, + None => return Err(UnrecognizedOption(nm.to_string())) + }; + match opts[optid].hasarg { + No => { + if name_pos == names.len() && !i_arg.is_none() { + return Err(UnexpectedArgument(nm.to_string())); + } + vals[optid].push(Given); + } + Maybe => { + if !i_arg.is_none() { + vals[optid] + .push(Val((i_arg.clone()) + .unwrap())); + } else if name_pos < names.len() || i + 1 == l || + is_arg(&args[i + 1]) { + vals[optid].push(Given); + } else { + i += 1; + vals[optid].push(Val(args[i].clone())); + } + } + Yes => { + if !i_arg.is_none() { + vals[optid].push(Val(i_arg.clone().unwrap())); + } else if i + 1 == l { + return Err(ArgumentMissing(nm.to_string())); + } else { + i += 1; + vals[optid].push(Val(args[i].clone())); + } + } + } + } + } + i += 1; + } + for i in 0 .. n_opts { + let n = vals[i].len(); + let occ = opts[i].occur; + if occ == Req && n == 0 { + return Err(OptionMissing(opts[i].name.to_string())); + } + if occ != Multi && n > 1 { + return Err(OptionDuplicated(opts[i].name.to_string())); + } + } + Ok(Matches { + opts: opts, + vals: vals, + free: free + }) + } + + /// Derive a short one-line usage summary from a set of long options. + pub fn short_usage(&self, program_name: &str) -> String { + let mut line = format!("Usage: {} ", program_name); + line.push_str(&self.grps.iter() + .map(format_option) + .collect::>() + .connect(" ")); + line + } + + /// Derive a usage message from a set of long options. 
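A small sketch of the two help formatters (not part of the patch; `myprog` and the options are placeholders): `short_usage` produces the one-line summary assembled from `format_option`, while `usage` renders the full option table, wrapping descriptions with `each_split_within` as implemented below.

```rust
extern crate getopts;

use getopts::Options;

fn main() {
    let mut opts = Options::new();
    opts.optopt("o", "", "set output file name", "NAME");
    opts.optflag("h", "help", "print this help menu");

    // One-line summary, e.g. "Usage: myprog [-o NAME] [-h]".
    println!("{}", opts.short_usage("myprog"));

    // Full help text: option column padded to 24 characters,
    // descriptions word-wrapped by `each_split_within`.
    print!("{}", opts.usage("Usage: myprog [options]"));
}
```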
+ pub fn usage(&self, brief: &str) -> String { + let desc_sep = format!("\n{}", repeat(" ").take(24).collect::()); + + let rows = self.grps.iter().map(|optref| { + let OptGroup{short_name, + long_name, + hint, + desc, + hasarg, + ..} = (*optref).clone(); + + let mut row = " ".to_string(); + + // short option + match short_name.len() { + 0 => {} + 1 => { + row.push('-'); + row.push_str(&short_name); + row.push(' '); + } + _ => panic!("the short name should only be 1 ascii char long"), + } + + // long option + match long_name.len() { + 0 => {} + _ => { + row.push_str("--"); + row.push_str(&long_name); + row.push(' '); + } + } + + // arg + match hasarg { + No => {} + Yes => row.push_str(&hint), + Maybe => { + row.push('['); + row.push_str(&hint); + row.push(']'); + } + } + + // FIXME: #5516 should be graphemes not codepoints + // here we just need to indent the start of the description + let rowlen = row.chars().count(); + if rowlen < 24 { + for _ in 0 .. 24 - rowlen { + row.push(' '); + } + } else { + row.push_str(&desc_sep) + } + + // Normalize desc to contain words separated by one space character + let mut desc_normalized_whitespace = String::new(); + for word in desc.split(|c: char| c.is_whitespace()) + .filter(|s| !s.is_empty()) { + desc_normalized_whitespace.push_str(word); + desc_normalized_whitespace.push(' '); + } + + // FIXME: #5516 should be graphemes not codepoints + let mut desc_rows = Vec::new(); + each_split_within(&desc_normalized_whitespace, + 54, + |substr| { + desc_rows.push(substr.to_string()); + true + }); + + // FIXME: #5516 should be graphemes not codepoints + // wrapped description + row.push_str(&desc_rows.connect(&desc_sep)); + + row + }); + + format!("{}\n\nOptions:\n{}\n", brief, + rows.collect::>().connect("\n")) + } +} + +/// What parsing style to use when parsing arguments +#[derive(Clone, Copy, PartialEq, Eq)] +pub enum ParsingStyle { + /// Flags and "free" arguments can be freely inter-mixed. + FloatingFrees, + /// As soon as a "free" argument (i.e. non-flag) is encountered, stop + /// considering any remaining arguments as flags. + StopAtFirstFree +} + +/// Name of an option. Either a string or a single char. +#[derive(Clone, PartialEq, Eq)] +enum Name { + /// A string representing the long name of an option. + /// For example: "help" + Long(String), + /// A char representing the short name of an option. + /// For example: 'h' + Short(char), +} + +/// Describes whether an option has an argument. +#[derive(Clone, Copy, PartialEq, Eq)] +pub enum HasArg { + /// The option requires an argument. + Yes, + /// The option takes no argument. + No, + /// The option argument is optional. + Maybe, +} + +/// Describes how often an option may occur. +#[derive(Clone, Copy, PartialEq, Eq)] +pub enum Occur { + /// The option occurs once. + Req, + /// The option occurs at most once. + Optional, + /// The option occurs zero or more times. + Multi, +} + +/// A description of a possible option. +#[derive(Clone, PartialEq, Eq)] +struct Opt { + /// Name of the option + name: Name, + /// Whether it has an argument + hasarg: HasArg, + /// How often it can occur + occur: Occur, + /// Which options it aliases + aliases: Vec, +} + +/// One group of options, e.g., both `-h` and `--help`, along with +/// their shared description and properties. +#[derive(Clone, PartialEq, Eq)] +struct OptGroup { + /// Short name of the option, e.g. `h` for a `-h` option + short_name: String, + /// Long name of the option, e.g. 
`help` for a `--help` option + long_name: String, + /// Hint for argument, e.g. `FILE` for a `-o FILE` option + hint: String, + /// Description for usage help text + desc: String, + /// Whether option has an argument + hasarg: HasArg, + /// How often it can occur + occur: Occur +} + +/// Describes whether an option is given at all or has a value. +#[derive(Clone, PartialEq, Eq)] +enum Optval { + Val(String), + Given, +} + +/// The result of checking command line arguments. Contains a vector +/// of matches and a vector of free strings. +#[derive(Clone, PartialEq, Eq)] +pub struct Matches { + /// Options that matched + opts: Vec, + /// Values of the Options that matched + vals: Vec>, + /// Free string fragments + pub free: Vec, +} + +/// The type returned when the command line does not conform to the +/// expected format. Use the `Debug` implementation to output detailed +/// information. +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum Fail { + /// The option requires an argument but none was passed. + ArgumentMissing(String), + /// The passed option is not declared among the possible options. + UnrecognizedOption(String), + /// A required option is not present. + OptionMissing(String), + /// A single occurrence option is being used multiple times. + OptionDuplicated(String), + /// There's an argument being passed to a non-argument option. + UnexpectedArgument(String), +} + +/// The type of failure that occurred. +#[derive(Clone, Copy, PartialEq, Eq)] +#[allow(missing_docs)] +pub enum FailType { + ArgumentMissing_, + UnrecognizedOption_, + OptionMissing_, + OptionDuplicated_, + UnexpectedArgument_, +} + +/// The result of parsing a command line with a set of options. +pub type Result = result::Result; + +impl Name { + fn from_str(nm: &str) -> Name { + if nm.len() == 1 { + Short(nm.as_bytes()[0] as char) + } else { + Long(nm.to_string()) + } + } + + fn to_string(&self) -> String { + match *self { + Short(ch) => ch.to_string(), + Long(ref s) => s.to_string() + } + } +} + +impl OptGroup { + /// Translate OptGroup into Opt. + /// (Both short and long names correspond to different Opts). + fn long_to_short(&self) -> Opt { + let OptGroup { + short_name, + long_name, + hasarg, + occur, + .. + } = (*self).clone(); + + match (short_name.len(), long_name.len()) { + (0,0) => panic!("this long-format option was given no name"), + (0,_) => Opt { + name: Long((long_name)), + hasarg: hasarg, + occur: occur, + aliases: Vec::new() + }, + (1,0) => Opt { + name: Short(short_name.as_bytes()[0] as char), + hasarg: hasarg, + occur: occur, + aliases: Vec::new() + }, + (1,_) => Opt { + name: Long((long_name)), + hasarg: hasarg, + occur: occur, + aliases: vec!( + Opt { + name: Short(short_name.as_bytes()[0] as char), + hasarg: hasarg, + occur: occur, + aliases: Vec::new() + } + ) + }, + (_,_) => panic!("something is wrong with the long-form opt") + } + } +} + +impl Matches { + fn opt_vals(&self, nm: &str) -> Vec { + match find_opt(&self.opts, Name::from_str(nm)) { + Some(id) => self.vals[id].clone(), + None => panic!("No option '{}' defined", nm) + } + } + + fn opt_val(&self, nm: &str) -> Option { + self.opt_vals(nm).into_iter().next() + } + + /// Returns true if an option was matched. + pub fn opt_present(&self, nm: &str) -> bool { + !self.opt_vals(nm).is_empty() + } + + /// Returns the number of times an option was matched. + pub fn opt_count(&self, nm: &str) -> usize { + self.opt_vals(nm).len() + } + + /// Returns true if any of several options were matched. 
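A hedged sketch of interrogating `Matches` (not part of the patch; the option names are invented): `opt_count` tallies repeated flags, `opt_default` (defined a little further down) supplies a fallback for a `Maybe`-argument option, and a clustered short option such as `-L/usr/local/lib` hands the rest of the token to the option as its argument, as the parse loop above describes.

```rust
extern crate getopts;

use getopts::Options;

fn main() {
    let mut opts = Options::new();
    opts.optflagmulti("v", "verbose", "increase verbosity");
    opts.optflagopt("c", "color", "colorize output", "WHEN");
    opts.optmulti("L", "library-path", "add a search path", "DIR");

    let args = vec!["-vv".to_string(),
                    "--color".to_string(),
                    "-L/usr/local/lib".to_string()];
    let m = opts.parse(&args).unwrap();

    // `-vv` counts as two occurrences of the same flag.
    assert_eq!(m.opt_count("v"), 2);

    // `--color` was given without a value, so the provided default is used.
    assert_eq!(m.opt_default("c", "auto"), Some("auto".to_string()));

    // The remainder of a short-option token becomes its argument,
    // so `-L/usr/local/lib` parses like `-L /usr/local/lib`.
    assert_eq!(m.opt_strs("L"), vec!["/usr/local/lib".to_string()]);
}
```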
+ pub fn opts_present(&self, names: &[String]) -> bool { + names.iter().any(|nm| { + match find_opt(&self.opts, Name::from_str(&nm)) { + Some(id) if !self.vals[id].is_empty() => true, + _ => false, + } + }) + } + + /// Returns the string argument supplied to one of several matching options or `None`. + pub fn opts_str(&self, names: &[String]) -> Option { + names.iter().filter_map(|nm| { + match self.opt_val(&nm) { + Some(Val(s)) => Some(s), + _ => None, + } + }).next() + } + + /// Returns a vector of the arguments provided to all matches of the given + /// option. + /// + /// Used when an option accepts multiple values. + pub fn opt_strs(&self, nm: &str) -> Vec { + self.opt_vals(nm).into_iter().filter_map(|v| { + match v { + Val(s) => Some(s), + _ => None, + } + }).collect() + } + + /// Returns the string argument supplied to a matching option or `None`. + pub fn opt_str(&self, nm: &str) -> Option { + match self.opt_val(nm) { + Some(Val(s)) => Some(s), + _ => None, + } + } + + + /// Returns the matching string, a default, or none. + /// + /// Returns none if the option was not present, `def` if the option was + /// present but no argument was provided, and the argument if the option was + /// present and an argument was provided. + pub fn opt_default(&self, nm: &str, def: &str) -> Option { + match self.opt_val(nm) { + Some(Val(s)) => Some(s), + Some(_) => Some(def.to_string()), + None => None, + } + } + +} + +fn is_arg(arg: &str) -> bool { + arg.as_bytes().get(0) == Some(&b'-') +} + +fn find_opt(opts: &[Opt], nm: Name) -> Option { + // Search main options. + let pos = opts.iter().position(|opt| opt.name == nm); + if pos.is_some() { + return pos + } + + // Search in aliases. + for candidate in opts.iter() { + if candidate.aliases.iter().position(|opt| opt.name == nm).is_some() { + return opts.iter().position(|opt| opt.name == candidate.name); + } + } + + None +} + +impl fmt::Display for Fail { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + ArgumentMissing(ref nm) => { + write!(f, "Argument to option '{}' missing.", *nm) + } + UnrecognizedOption(ref nm) => { + write!(f, "Unrecognized option: '{}'.", *nm) + } + OptionMissing(ref nm) => { + write!(f, "Required option '{}' missing.", *nm) + } + OptionDuplicated(ref nm) => { + write!(f, "Option '{}' given more than once.", *nm) + } + UnexpectedArgument(ref nm) => { + write!(f, "Option '{}' does not take an argument.", *nm) + } + } + } +} + +fn format_option(opt: &OptGroup) -> String { + let mut line = String::new(); + + if opt.occur != Req { + line.push('['); + } + + // Use short_name is possible, but fallback to long_name. 
+ if opt.short_name.len() > 0 { + line.push('-'); + line.push_str(&opt.short_name); + } else { + line.push_str("--"); + line.push_str(&opt.long_name); + } + + if opt.hasarg != No { + line.push(' '); + if opt.hasarg == Maybe { + line.push('['); + } + line.push_str(&opt.hint); + if opt.hasarg == Maybe { + line.push(']'); + } + } + + if opt.occur != Req { + line.push(']'); + } + if opt.occur == Multi { + line.push_str(".."); + } + + line +} + +#[derive(Clone, Copy)] +enum SplitWithinState { + A, // leading whitespace, initial state + B, // words + C, // internal and trailing whitespace +} + +#[derive(Clone, Copy)] +enum Whitespace { + Ws, // current char is whitespace + Cr // current char is not whitespace +} + +#[derive(Clone, Copy)] +enum LengthLimit { + UnderLim, // current char makes current substring still fit in limit + OverLim // current char makes current substring no longer fit in limit +} + + +/// Splits a string into substrings with possibly internal whitespace, +/// each of them at most `lim` bytes long. The substrings have leading and trailing +/// whitespace removed, and are only cut at whitespace boundaries. +/// +/// Note: Function was moved here from `std::str` because this module is the only place that +/// uses it, and because it was too specific for a general string function. +/// +/// # Panics +/// +/// Panics during iteration if the string contains a non-whitespace +/// sequence longer than the limit. +fn each_split_within<'a, F>(ss: &'a str, lim: usize, mut it: F) + -> bool where F: FnMut(&'a str) -> bool { + // Just for fun, let's write this as a state machine: + + let mut slice_start = 0; + let mut last_start = 0; + let mut last_end = 0; + let mut state = A; + let mut fake_i = ss.len(); + let mut lim = lim; + + let mut cont = true; + + // if the limit is larger than the string, lower it to save cycles + if lim >= fake_i { + lim = fake_i; + } + + let mut machine = |cont: &mut bool, (i, c): (usize, char)| { + let whitespace = if c.is_whitespace() { Ws } else { Cr }; + let limit = if (i - slice_start + 1) <= lim { UnderLim } else { OverLim }; + + state = match (state, whitespace, limit) { + (A, Ws, _) => { A } + (A, Cr, _) => { slice_start = i; last_start = i; B } + + (B, Cr, UnderLim) => { B } + (B, Cr, OverLim) if (i - last_start + 1) > lim + => panic!("word starting with {} longer than limit!", + &ss[last_start..i + 1]), + (B, Cr, OverLim) => { + *cont = it(&ss[slice_start..last_end]); + slice_start = last_start; + B + } + (B, Ws, UnderLim) => { + last_end = i; + C + } + (B, Ws, OverLim) => { + last_end = i; + *cont = it(&ss[slice_start..last_end]); + A + } + + (C, Cr, UnderLim) => { + last_start = i; + B + } + (C, Cr, OverLim) => { + *cont = it(&ss[slice_start..last_end]); + slice_start = i; + last_start = i; + last_end = i; + B + } + (C, Ws, OverLim) => { + *cont = it(&ss[slice_start..last_end]); + A + } + (C, Ws, UnderLim) => { + C + } + }; + + *cont + }; + + ss.char_indices().all(|x| machine(&mut cont, x)); + + // Let the automaton 'run out' by supplying trailing whitespace + while cont && match state { B | C => true, A => false } { + machine(&mut cont, (fake_i, ' ')); + fake_i += 1; + } + return cont; +} + +#[test] +fn test_split_within() { + fn t(s: &str, i: usize, u: &[String]) { + let mut v = Vec::new(); + each_split_within(s, i, |s| { v.push(s.to_string()); true }); + assert!(v.iter().zip(u.iter()).all(|(a,b)| a == b)); + } + t("", 0, &[]); + t("", 15, &[]); + t("hello", 15, &["hello".to_string()]); + t("\nMary had a little lamb\nLittle lamb\n", 15, &[ + 
"Mary had a".to_string(), + "little lamb".to_string(), + "Little lamb".to_string() + ]); + t("\nMary had a little lamb\nLittle lamb\n", ::std::usize::MAX, + &["Mary had a little lamb\nLittle lamb".to_string()]); +} + +#[cfg(test)] +mod tests { + use super::{HasArg, Name, Occur, Opt, Options, ParsingStyle}; + use super::Fail::*; + + // Tests for reqopt + #[test] + fn test_reqopt() { + let long_args = vec!("--test=20".to_string()); + let mut opts = Options::new(); + opts.reqopt("t", "test", "testing", "TEST"); + match opts.parse(&long_args) { + Ok(ref m) => { + assert!(m.opt_present("test")); + assert_eq!(m.opt_str("test").unwrap(), "20"); + assert!(m.opt_present("t")); + assert_eq!(m.opt_str("t").unwrap(), "20"); + } + _ => { panic!("test_reqopt failed (long arg)"); } + } + let short_args = vec!("-t".to_string(), "20".to_string()); + match opts.parse(&short_args) { + Ok(ref m) => { + assert!((m.opt_present("test"))); + assert_eq!(m.opt_str("test").unwrap(), "20"); + assert!((m.opt_present("t"))); + assert_eq!(m.opt_str("t").unwrap(), "20"); + } + _ => { panic!("test_reqopt failed (short arg)"); } + } + } + + #[test] + fn test_reqopt_missing() { + let args = vec!("blah".to_string()); + match Options::new() + .reqopt("t", "test", "testing", "TEST") + .parse(&args) { + Err(OptionMissing(_)) => {}, + _ => panic!() + } + } + + #[test] + fn test_reqopt_no_arg() { + let long_args = vec!("--test".to_string()); + let mut opts = Options::new(); + opts.reqopt("t", "test", "testing", "TEST"); + match opts.parse(&long_args) { + Err(ArgumentMissing(_)) => {}, + _ => panic!() + } + let short_args = vec!("-t".to_string()); + match opts.parse(&short_args) { + Err(ArgumentMissing(_)) => {}, + _ => panic!() + } + } + + #[test] + fn test_reqopt_multi() { + let args = vec!("--test=20".to_string(), "-t".to_string(), "30".to_string()); + match Options::new() + .reqopt("t", "test", "testing", "TEST") + .parse(&args) { + Err(OptionDuplicated(_)) => {}, + _ => panic!() + } + } + + // Tests for optopt + #[test] + fn test_optopt() { + let long_args = vec!("--test=20".to_string()); + let mut opts = Options::new(); + opts.optopt("t", "test", "testing", "TEST"); + match opts.parse(&long_args) { + Ok(ref m) => { + assert!(m.opt_present("test")); + assert_eq!(m.opt_str("test").unwrap(), "20"); + assert!((m.opt_present("t"))); + assert_eq!(m.opt_str("t").unwrap(), "20"); + } + _ => panic!() + } + let short_args = vec!("-t".to_string(), "20".to_string()); + match opts.parse(&short_args) { + Ok(ref m) => { + assert!((m.opt_present("test"))); + assert_eq!(m.opt_str("test").unwrap(), "20"); + assert!((m.opt_present("t"))); + assert_eq!(m.opt_str("t").unwrap(), "20"); + } + _ => panic!() + } + } + + #[test] + fn test_optopt_missing() { + let args = vec!("blah".to_string()); + match Options::new() + .optopt("t", "test", "testing", "TEST") + .parse(&args) { + Ok(ref m) => { + assert!(!m.opt_present("test")); + assert!(!m.opt_present("t")); + } + _ => panic!() + } + } + + #[test] + fn test_optopt_no_arg() { + let long_args = vec!("--test".to_string()); + let mut opts = Options::new(); + opts.optopt("t", "test", "testing", "TEST"); + match opts.parse(&long_args) { + Err(ArgumentMissing(_)) => {}, + _ => panic!() + } + let short_args = vec!("-t".to_string()); + match opts.parse(&short_args) { + Err(ArgumentMissing(_)) => {}, + _ => panic!() + } + } + + #[test] + fn test_optopt_multi() { + let args = vec!("--test=20".to_string(), "-t".to_string(), "30".to_string()); + match Options::new() + .optopt("t", "test", "testing", "TEST") + 
.parse(&args) { + Err(OptionDuplicated(_)) => {}, + _ => panic!() + } + } + + // Tests for optflag + #[test] + fn test_optflag() { + let long_args = vec!("--test".to_string()); + let mut opts = Options::new(); + opts.optflag("t", "test", "testing"); + match opts.parse(&long_args) { + Ok(ref m) => { + assert!(m.opt_present("test")); + assert!(m.opt_present("t")); + } + _ => panic!() + } + let short_args = vec!("-t".to_string()); + match opts.parse(&short_args) { + Ok(ref m) => { + assert!(m.opt_present("test")); + assert!(m.opt_present("t")); + } + _ => panic!() + } + } + + #[test] + fn test_optflag_missing() { + let args = vec!("blah".to_string()); + match Options::new() + .optflag("t", "test", "testing") + .parse(&args) { + Ok(ref m) => { + assert!(!m.opt_present("test")); + assert!(!m.opt_present("t")); + } + _ => panic!() + } + } + + #[test] + fn test_optflag_long_arg() { + let args = vec!("--test=20".to_string()); + match Options::new() + .optflag("t", "test", "testing") + .parse(&args) { + Err(UnexpectedArgument(_)) => {}, + _ => panic!() + } + } + + #[test] + fn test_optflag_multi() { + let args = vec!("--test".to_string(), "-t".to_string()); + match Options::new() + .optflag("t", "test", "testing") + .parse(&args) { + Err(OptionDuplicated(_)) => {}, + _ => panic!() + } + } + + #[test] + fn test_optflag_short_arg() { + let args = vec!("-t".to_string(), "20".to_string()); + match Options::new() + .optflag("t", "test", "testing") + .parse(&args) { + Ok(ref m) => { + // The next variable after the flag is just a free argument + + assert!(m.free[0] == "20"); + } + _ => panic!() + } + } + + // Tests for optflagmulti + #[test] + fn test_optflagmulti_short1() { + let args = vec!("-v".to_string()); + match Options::new() + .optflagmulti("v", "verbose", "verbosity") + .parse(&args) { + Ok(ref m) => { + assert_eq!(m.opt_count("v"), 1); + } + _ => panic!() + } + } + + #[test] + fn test_optflagmulti_short2a() { + let args = vec!("-v".to_string(), "-v".to_string()); + match Options::new() + .optflagmulti("v", "verbose", "verbosity") + .parse(&args) { + Ok(ref m) => { + assert_eq!(m.opt_count("v"), 2); + } + _ => panic!() + } + } + + #[test] + fn test_optflagmulti_short2b() { + let args = vec!("-vv".to_string()); + match Options::new() + .optflagmulti("v", "verbose", "verbosity") + .parse(&args) { + Ok(ref m) => { + assert_eq!(m.opt_count("v"), 2); + } + _ => panic!() + } + } + + #[test] + fn test_optflagmulti_long1() { + let args = vec!("--verbose".to_string()); + match Options::new() + .optflagmulti("v", "verbose", "verbosity") + .parse(&args) { + Ok(ref m) => { + assert_eq!(m.opt_count("verbose"), 1); + } + _ => panic!() + } + } + + #[test] + fn test_optflagmulti_long2() { + let args = vec!("--verbose".to_string(), "--verbose".to_string()); + match Options::new() + .optflagmulti("v", "verbose", "verbosity") + .parse(&args) { + Ok(ref m) => { + assert_eq!(m.opt_count("verbose"), 2); + } + _ => panic!() + } + } + + #[test] + fn test_optflagmulti_mix() { + let args = vec!("--verbose".to_string(), "-v".to_string(), + "-vv".to_string(), "verbose".to_string()); + match Options::new() + .optflagmulti("v", "verbose", "verbosity") + .parse(&args) { + Ok(ref m) => { + assert_eq!(m.opt_count("verbose"), 4); + assert_eq!(m.opt_count("v"), 4); + } + _ => panic!() + } + } + + // Tests for optflagopt + #[test] + fn test_optflagopt() { + let long_args = vec!("--test".to_string()); + let mut opts = Options::new(); + opts.optflag("t", "test", "testing"); + match opts.parse(&long_args) { + Ok(ref m) => { + 
assert!(m.opt_present("test")); + assert!(m.opt_present("t")); + } + _ => panic!() + } + let short_args = vec!("-t".to_string()); + match opts.parse(&short_args) { + Ok(ref m) => { + assert!(m.opt_present("test")); + assert!(m.opt_present("t")); + } + _ => panic!() + } + let no_args: Vec = vec!(); + match opts.parse(&no_args) { + Ok(ref m) => { + assert!(!m.opt_present("test")); + assert!(!m.opt_present("t")); + } + _ => panic!() + } + } + + // Tests for optmulti + #[test] + fn test_optmulti() { + let long_args = vec!("--test=20".to_string()); + let mut opts = Options::new(); + opts.optmulti("t", "test", "testing", "TEST"); + match opts.parse(&long_args) { + Ok(ref m) => { + assert!((m.opt_present("test"))); + assert_eq!(m.opt_str("test").unwrap(), "20"); + assert!((m.opt_present("t"))); + assert_eq!(m.opt_str("t").unwrap(), "20"); + } + _ => panic!() + } + let short_args = vec!("-t".to_string(), "20".to_string()); + match opts.parse(&short_args) { + Ok(ref m) => { + assert!((m.opt_present("test"))); + assert_eq!(m.opt_str("test").unwrap(), "20"); + assert!((m.opt_present("t"))); + assert_eq!(m.opt_str("t").unwrap(), "20"); + } + _ => panic!() + } + } + + #[test] + fn test_optmulti_missing() { + let args = vec!("blah".to_string()); + match Options::new() + .optmulti("t", "test", "testing", "TEST") + .parse(&args) { + Ok(ref m) => { + assert!(!m.opt_present("test")); + assert!(!m.opt_present("t")); + } + _ => panic!() + } + } + + #[test] + fn test_optmulti_no_arg() { + let long_args = vec!("--test".to_string()); + let mut opts = Options::new(); + opts.optmulti("t", "test", "testing", "TEST"); + match opts.parse(&long_args) { + Err(ArgumentMissing(_)) => {}, + _ => panic!() + } + let short_args = vec!("-t".to_string()); + match opts.parse(&short_args) { + Err(ArgumentMissing(_)) => {}, + _ => panic!() + } + } + + #[test] + fn test_optmulti_multi() { + let args = vec!("--test=20".to_string(), "-t".to_string(), "30".to_string()); + match Options::new() + .optmulti("t", "test", "testing", "TEST") + .parse(&args) { + Ok(ref m) => { + assert!(m.opt_present("test")); + assert_eq!(m.opt_str("test").unwrap(), "20"); + assert!(m.opt_present("t")); + assert_eq!(m.opt_str("t").unwrap(), "20"); + let pair = m.opt_strs("test"); + assert!(pair[0] == "20"); + assert!(pair[1] == "30"); + } + _ => panic!() + } + } + + #[test] + fn test_free_argument_is_hyphen() { + let args = vec!("-".to_string()); + match Options::new().parse(&args) { + Ok(ref m) => { + assert_eq!(m.free.len(), 1); + assert_eq!(m.free[0], "-"); + } + _ => panic!() + } + } + + #[test] + fn test_unrecognized_option() { + let long_args = vec!("--untest".to_string()); + let mut opts = Options::new(); + opts.optmulti("t", "test", "testing", "TEST"); + match opts.parse(&long_args) { + Err(UnrecognizedOption(_)) => {}, + _ => panic!() + } + let short_args = vec!("-u".to_string()); + match opts.parse(&short_args) { + Err(UnrecognizedOption(_)) => {}, + _ => panic!() + } + } + + #[test] + fn test_combined() { + let args = + vec!("prog".to_string(), + "free1".to_string(), + "-s".to_string(), + "20".to_string(), + "free2".to_string(), + "--flag".to_string(), + "--long=30".to_string(), + "-f".to_string(), + "-m".to_string(), + "40".to_string(), + "-m".to_string(), + "50".to_string(), + "-n".to_string(), + "-A B".to_string(), + "-n".to_string(), + "-60 70".to_string()); + match Options::new() + .optopt("s", "something", "something", "SOMETHING") + .optflag("", "flag", "a flag") + .reqopt("", "long", "hi", "LONG") + .optflag("f", "", "another flag") + 
.optmulti("m", "", "mmmmmm", "YUM") + .optmulti("n", "", "nothing", "NOTHING") + .optopt("", "notpresent", "nothing to see here", "NOPE") + .parse(&args) { + Ok(ref m) => { + assert!(m.free[0] == "prog"); + assert!(m.free[1] == "free1"); + assert_eq!(m.opt_str("s").unwrap(), "20"); + assert!(m.free[2] == "free2"); + assert!((m.opt_present("flag"))); + assert_eq!(m.opt_str("long").unwrap(), "30"); + assert!((m.opt_present("f"))); + let pair = m.opt_strs("m"); + assert!(pair[0] == "40"); + assert!(pair[1] == "50"); + let pair = m.opt_strs("n"); + assert!(pair[0] == "-A B"); + assert!(pair[1] == "-60 70"); + assert!((!m.opt_present("notpresent"))); + } + _ => panic!() + } + } + + #[test] + fn test_mixed_stop() { + let args = + vec!("-a".to_string(), + "b".to_string(), + "-c".to_string(), + "d".to_string()); + match Options::new() + .parsing_style(ParsingStyle::StopAtFirstFree) + .optflag("a", "", "") + .optopt("c", "", "", "") + .parse(&args) { + Ok(ref m) => { + println!("{}", m.opt_present("c")); + assert!(m.opt_present("a")); + assert!(!m.opt_present("c")); + assert_eq!(m.free.len(), 3); + assert_eq!(m.free[0], "b"); + assert_eq!(m.free[1], "-c"); + assert_eq!(m.free[2], "d"); + } + _ => panic!() + } + } + + #[test] + fn test_multi() { + let mut opts = Options::new(); + opts.optopt("e", "", "encrypt", "ENCRYPT"); + opts.optopt("", "encrypt", "encrypt", "ENCRYPT"); + opts.optopt("f", "", "flag", "FLAG"); + + let args_single = vec!("-e".to_string(), "foo".to_string()); + let matches_single = &match opts.parse(&args_single) { + Ok(m) => m, + Err(_) => panic!() + }; + assert!(matches_single.opts_present(&["e".to_string()])); + assert!(matches_single.opts_present(&["encrypt".to_string(), "e".to_string()])); + assert!(matches_single.opts_present(&["e".to_string(), "encrypt".to_string()])); + assert!(!matches_single.opts_present(&["encrypt".to_string()])); + assert!(!matches_single.opts_present(&["thing".to_string()])); + assert!(!matches_single.opts_present(&[])); + + assert_eq!(matches_single.opts_str(&["e".to_string()]).unwrap(), "foo"); + assert_eq!(matches_single.opts_str(&["e".to_string(), "encrypt".to_string()]).unwrap(), + "foo"); + assert_eq!(matches_single.opts_str(&["encrypt".to_string(), "e".to_string()]).unwrap(), + "foo"); + + let args_both = vec!("-e".to_string(), "foo".to_string(), "--encrypt".to_string(), + "foo".to_string()); + let matches_both = &match opts.parse(&args_both) { + Ok(m) => m, + Err(_) => panic!() + }; + assert!(matches_both.opts_present(&["e".to_string()])); + assert!(matches_both.opts_present(&["encrypt".to_string()])); + assert!(matches_both.opts_present(&["encrypt".to_string(), "e".to_string()])); + assert!(matches_both.opts_present(&["e".to_string(), "encrypt".to_string()])); + assert!(!matches_both.opts_present(&["f".to_string()])); + assert!(!matches_both.opts_present(&["thing".to_string()])); + assert!(!matches_both.opts_present(&[])); + + assert_eq!(matches_both.opts_str(&["e".to_string()]).unwrap(), "foo"); + assert_eq!(matches_both.opts_str(&["encrypt".to_string()]).unwrap(), "foo"); + assert_eq!(matches_both.opts_str(&["e".to_string(), "encrypt".to_string()]).unwrap(), + "foo"); + assert_eq!(matches_both.opts_str(&["encrypt".to_string(), "e".to_string()]).unwrap(), + "foo"); + } + + #[test] + fn test_nospace() { + let args = vec!("-Lfoo".to_string(), "-M.".to_string()); + let matches = &match Options::new() + .optmulti("L", "", "library directory", "LIB") + .optmulti("M", "", "something", "MMMM") + .parse(&args) { + Ok(m) => m, + Err(_) => panic!() + 
}; + assert!(matches.opts_present(&["L".to_string()])); + assert_eq!(matches.opts_str(&["L".to_string()]).unwrap(), "foo"); + assert!(matches.opts_present(&["M".to_string()])); + assert_eq!(matches.opts_str(&["M".to_string()]).unwrap(), "."); + + } + + #[test] + fn test_nospace_conflict() { + let args = vec!("-vvLverbose".to_string(), "-v".to_string() ); + let matches = &match Options::new() + .optmulti("L", "", "library directory", "LIB") + .optflagmulti("v", "verbose", "Verbose") + .parse(&args) { + Ok(m) => m, + Err(e) => panic!( "{}", e ) + }; + assert!(matches.opts_present(&["L".to_string()])); + assert_eq!(matches.opts_str(&["L".to_string()]).unwrap(), "verbose"); + assert!(matches.opts_present(&["v".to_string()])); + assert_eq!(3, matches.opt_count("v")); + } + + #[test] + fn test_long_to_short() { + let mut short = Opt { + name: Name::Long("banana".to_string()), + hasarg: HasArg::Yes, + occur: Occur::Req, + aliases: Vec::new(), + }; + short.aliases = vec!(Opt { name: Name::Short('b'), + hasarg: HasArg::Yes, + occur: Occur::Req, + aliases: Vec::new() }); + let mut opts = Options::new(); + opts.reqopt("b", "banana", "some bananas", "VAL"); + let ref verbose = opts.grps[0]; + assert!(verbose.long_to_short() == short); + } + + #[test] + fn test_aliases_long_and_short() { + let args = vec!("-a".to_string(), "--apple".to_string(), "-a".to_string()); + + let matches = Options::new() + .optflagmulti("a", "apple", "Desc") + .parse(&args) + .unwrap(); + assert_eq!(3, matches.opt_count("a")); + assert_eq!(3, matches.opt_count("apple")); + } + + #[test] + fn test_usage() { + let mut opts = Options::new(); + opts.reqopt("b", "banana", "Desc", "VAL"); + opts.optopt("a", "012345678901234567890123456789", + "Desc", "VAL"); + opts.optflag("k", "kiwi", "Desc"); + opts.optflagopt("p", "", "Desc", "VAL"); + opts.optmulti("l", "", "Desc", "VAL"); + + let expected = +"Usage: fruits + +Options: + -b --banana VAL Desc + -a --012345678901234567890123456789 VAL + Desc + -k --kiwi Desc + -p [VAL] Desc + -l VAL Desc +"; + + let generated_usage = opts.usage("Usage: fruits"); + + debug!("expected: <<{}>>", expected); + debug!("generated: <<{}>>", generated_usage); + assert_eq!(generated_usage, expected); + } + + #[test] + fn test_usage_description_wrapping() { + // indentation should be 24 spaces + // lines wrap after 78: or rather descriptions wrap after 54 + + let mut opts = Options::new(); + opts.optflag("k", "kiwi", + "This is a long description which won't be wrapped..+.."); // 54 + opts.optflag("a", "apple", + "This is a long description which _will_ be wrapped..+.."); + + let expected = +"Usage: fruits + +Options: + -k --kiwi This is a long description which won't be wrapped..+.. + -a --apple This is a long description which _will_ be + wrapped..+.. 
+"; + + let usage = opts.usage("Usage: fruits"); + + debug!("expected: <<{}>>", expected); + debug!("generated: <<{}>>", usage); + assert!(usage == expected) + } + + #[test] + fn test_usage_description_multibyte_handling() { + let mut opts = Options::new(); + opts.optflag("k", "k\u{2013}w\u{2013}", + "The word kiwi is normally spelled with two i's"); + opts.optflag("a", "apple", + "This \u{201C}description\u{201D} has some characters that could \ +confuse the line wrapping; an apple costs 0.51€ in some parts of Europe."); + + let expected = +"Usage: fruits + +Options: + -k --k–w– The word kiwi is normally spelled with two i's + -a --apple This “description” has some characters that could + confuse the line wrapping; an apple costs 0.51€ in + some parts of Europe. +"; + + let usage = opts.usage("Usage: fruits"); + + debug!("expected: <<{}>>", expected); + debug!("generated: <<{}>>", usage); + assert!(usage == expected) + } + + #[test] + fn test_short_usage() { + let mut opts = Options::new(); + opts.reqopt("b", "banana", "Desc", "VAL"); + opts.optopt("a", "012345678901234567890123456789", + "Desc", "VAL"); + opts.optflag("k", "kiwi", "Desc"); + opts.optflagopt("p", "", "Desc", "VAL"); + opts.optmulti("l", "", "Desc", "VAL"); + + let expected = "Usage: fruits -b VAL [-a VAL] [-k] [-p [VAL]] [-l VAL]..".to_string(); + let generated_usage = opts.short_usage("fruits"); + + debug!("expected: <<{}>>", expected); + debug!("generated: <<{}>>", generated_usage); + assert_eq!(generated_usage, expected); + } +} diff --git a/tests/smoke.rs b/tests/smoke.rs new file mode 100644 index 0000000000000..a46f9c0167ab3 --- /dev/null +++ b/tests/smoke.rs @@ -0,0 +1,8 @@ +extern crate getopts; + +use std::env; + +#[test] +fn main() { + getopts::Options::new().parse(env::args()).unwrap(); +} From 54c68362ab583118a69d5c541da26517b86e4292 Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Fri, 5 Jun 2015 15:09:55 -0700 Subject: [PATCH 5/6] Squashed 'src/external/rustc_serialize/' changes from e3115f3..cba058f cba058f Add decoding into boxed slices git-subtree-dir: src/external/rustc_serialize git-subtree-split: cba058fb8bd106d9dcdef773335a633ca4aaa2ce --- src/external/rustc_serialize/src/serialize.rs | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/external/rustc_serialize/src/serialize.rs b/src/external/rustc_serialize/src/serialize.rs index 52507ca8ccc9b..436226c285980 100644 --- a/src/external/rustc_serialize/src/serialize.rs +++ b/src/external/rustc_serialize/src/serialize.rs @@ -417,6 +417,13 @@ impl< T: Decodable> Decodable for Box { } } +impl< T: Decodable> Decodable for Box<[T]> { + fn decode(d: &mut D) -> Result, D::Error> { + let v: Vec = try!(Decodable::decode(d)); + Ok(v.into_boxed_slice()) + } +} + impl Encodable for Rc { #[inline] fn encode(&self, s: &mut S) -> Result<(), S::Error> { From 52453d8bd65f57a58db9a68b969b16695f9255fa Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Fri, 5 Jun 2015 09:52:18 -0700 Subject: [PATCH 6/6] Update distribution with new external crates This commit updates our own makefiles to take into account external crates in the `src/external` folder now. Additionally it updates all current consumers of the crates that have transitioned to their external sources to the new APIs that are upstream. This commit also adds a README to the `src/external` folder explaining the management strategy for those libraries. 
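For reference, call sites that previously used the free functions of the in-tree
`getopts` are ported to the builder-style `Options` API of the external crate,
and users of the old `serialize` crate now link against `rustc_serialize`
(optionally renamed back via `extern crate rustc_serialize as serialize;`).
A minimal sketch of the new getopts pattern follows; the option names and usage
string are purely illustrative and not tied to any particular call site:

```rust
extern crate getopts;

use std::env;

fn main() {
    // Build the option set with the chainable Options methods.
    let mut opts = getopts::Options::new();
    opts.optopt("o", "", "set output file name", "NAME")
        .optflag("h", "help", "print this help menu");

    // Parse everything after argv[0]; parse() accepts an iterator of strings.
    let matches = match opts.parse(env::args().skip(1)) {
        Ok(m) => m,
        Err(f) => panic!("{}", f),
    };

    if matches.opt_present("h") {
        // Usage text is now generated from the Options value itself.
        print!("{}", opts.usage("Usage: prog [options]"));
        return;
    }
    let _output = matches.opt_str("o");
}
```
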
--- mk/crates.mk | 60 +++-- mk/dist.mk | 4 +- src/compiletest/compiletest.rs | 83 +++--- src/etc/tidy.py | 1 + src/external/README.md | 42 +++ src/librbml/lib.rs | 10 +- src/librustc/lib.rs | 4 +- src/librustc/metadata/decoder.rs | 2 +- src/librustc/metadata/encoder.rs | 2 +- src/librustc/middle/astencode.rs | 38 +-- src/librustc/session/config.rs | 251 ++++++------------ src/librustc_back/lib.rs | 2 +- src/librustc_back/sha2.rs | 4 +- src/librustc_back/target/mod.rs | 10 +- src/librustc_data_structures/lib.rs | 2 +- src/librustc_driver/driver.rs | 2 +- src/librustc_driver/lib.rs | 77 ++---- src/librustc_trans/back/link.rs | 2 +- src/librustc_trans/lib.rs | 2 +- src/librustdoc/html/render.rs | 2 +- src/librustdoc/lib.rs | 102 ++++--- src/librustdoc/plugins.rs | 2 +- src/libsyntax/ast.rs | 6 +- src/libsyntax/codemap.rs | 2 +- src/libsyntax/diagnostic.rs | 16 +- src/libsyntax/lib.rs | 4 +- src/libsyntax/owned_slice.rs | 2 +- src/libsyntax/parse/token.rs | 2 +- src/libsyntax/ptr.rs | 2 +- src/libtest/lib.rs | 48 ++-- src/test/bench/shootout-pfib.rs | 13 +- .../compile-fail-fulldeps/unstable-crates.rs | 35 +++ src/test/run-pass/derive-no-std.rs | 2 +- .../deriving-encodable-decodable-box.rs | 8 +- ...riving-encodable-decodable-cell-refcell.rs | 10 +- src/test/run-pass/deriving-global.rs | 8 +- src/test/run-pass/extern-mod-syntax.rs | 4 +- src/test/run-pass/getopts_ref.rs | 15 +- src/test/run-pass/issue-11881.rs | 10 +- src/test/run-pass/issue-14021.rs | 10 +- src/test/run-pass/issue-15924.rs | 6 +- src/test/run-pass/issue-2804.rs | 4 +- src/test/run-pass/issue-4016.rs | 7 +- src/test/run-pass/issue-4036.rs | 7 +- 44 files changed, 454 insertions(+), 471 deletions(-) create mode 100644 src/external/README.md create mode 100644 src/test/compile-fail-fulldeps/unstable-crates.rs diff --git a/mk/crates.mk b/mk/crates.mk index 93be1e6ba6326..6c4b983b40e85 100644 --- a/mk/crates.mk +++ b/mk/crates.mk @@ -29,6 +29,11 @@ # the HOST_CRATES set, but the HOST_CRATES set can depend on target # crates. # +# EXTERNAL_CRATES +# These crates are all imported using the git-subtree command and have a +# slightly different structure as they're primarily intended to be built +# with Cargo, but we build them manually here. +# # TOOLS # A list of all tools which will be built as part of the compilation # process. It is currently assumed that most tools are built through @@ -49,15 +54,16 @@ # automatically generated for all stage/host/target combinations. 
################################################################################ -TARGET_CRATES := libc std flate arena term \ - serialize getopts collections test rand \ +TARGET_CRATES := libc std flate arena \ + collections test rand \ log graphviz core rbml alloc \ rustc_unicode rustc_bitflags RUSTC_CRATES := rustc rustc_typeck rustc_borrowck rustc_resolve rustc_driver \ rustc_trans rustc_back rustc_llvm rustc_privacy rustc_lint \ rustc_data_structures HOST_CRATES := syntax $(RUSTC_CRATES) rustdoc fmt_macros -CRATES := $(TARGET_CRATES) $(HOST_CRATES) +EXTERNAL_CRATES := term rustc_serialize getopts +CRATES := $(TARGET_CRATES) $(HOST_CRATES) $(EXTERNAL_CRATES) TOOLS := compiletest rustdoc rustc rustbook error-index-generator DEPS_core := @@ -68,37 +74,37 @@ DEPS_std := core libc rand alloc collections rustc_unicode \ native:rust_builtin native:backtrace native:rustrt_native \ rustc_bitflags DEPS_graphviz := std -DEPS_syntax := std term serialize log fmt_macros arena libc +DEPS_syntax := std term rustc_serialize log fmt_macros arena libc DEPS_rustc_driver := arena flate getopts graphviz libc rustc rustc_back rustc_borrowck \ - rustc_typeck rustc_resolve log syntax serialize rustc_llvm \ + rustc_typeck rustc_resolve log syntax rustc_serialize rustc_llvm \ rustc_trans rustc_privacy rustc_lint DEPS_rustc_trans := arena flate getopts graphviz libc rustc rustc_back \ - log syntax serialize rustc_llvm + log syntax rustc_serialize rustc_llvm DEPS_rustc_typeck := rustc syntax DEPS_rustc_borrowck := rustc log graphviz syntax DEPS_rustc_resolve := rustc log syntax DEPS_rustc_privacy := rustc log syntax DEPS_rustc_lint := rustc log syntax -DEPS_rustc := syntax flate arena serialize getopts rbml \ +DEPS_rustc := syntax flate arena rustc_serialize getopts rbml \ log graphviz rustc_llvm rustc_back rustc_data_structures DEPS_rustc_llvm := native:rustllvm libc std DEPS_rustc_back := std syntax rustc_llvm flate log libc -DEPS_rustc_data_structures := std log serialize -DEPS_rustdoc := rustc rustc_driver native:hoedown serialize getopts \ +DEPS_rustc_data_structures := std log rustc_serialize +DEPS_rustdoc := rustc rustc_driver native:hoedown rustc_serialize getopts \ test rustc_lint DEPS_rustc_bitflags := core DEPS_flate := std native:miniz DEPS_arena := std DEPS_graphviz := std DEPS_glob := std -DEPS_serialize := std log -DEPS_rbml := std log serialize -DEPS_term := std log +DEPS_rustc_serialize := std +DEPS_rbml := std log rustc_serialize +DEPS_term := std DEPS_getopts := std DEPS_collections := core alloc rustc_unicode DEPS_num := std -DEPS_test := std getopts serialize rbml term native:rust_test_helpers +DEPS_test := std getopts rustc_serialize rbml term native:rust_test_helpers DEPS_rand := core DEPS_log := std DEPS_fmt_macros = std @@ -107,7 +113,7 @@ TOOL_DEPS_compiletest := test getopts TOOL_DEPS_rustdoc := rustdoc TOOL_DEPS_rustc := rustc_driver TOOL_DEPS_rustbook := std rustdoc -TOOL_DEPS_error-index-generator := rustdoc syntax serialize +TOOL_DEPS_error-index-generator := rustdoc syntax rustc_serialize TOOL_SOURCE_compiletest := $(S)src/compiletest/compiletest.rs TOOL_SOURCE_rustdoc := $(S)src/driver/driver.rs TOOL_SOURCE_rustc := $(S)src/driver/driver.rs @@ -136,11 +142,33 @@ DOC_CRATES := std alloc collections core libc rustc_unicode define RUST_CRATE CRATEFILE_$(1) := $$(S)src/lib$(1)/lib.rs RSINPUTS_$(1) := $$(call rwildcard,$(S)src/lib$(1)/,*.rs) +endef + +$(foreach crate,$(TARGET_CRATES),$(eval $(call RUST_CRATE,$(crate)))) +$(foreach crate,$(HOST_CRATES),$(eval $(call 
RUST_CRATE,$(crate)))) + +# Distinct from the above macro, this generates the variables needed for +# external crates (located in the src/external folder). These crates are +# typically intended to be built with Cargo so we need to pass some extra flags +# and use a different source location. +# +# $(1) is the crate to generate variables for +define EXTERNAL_CRATE +CRATEFILE_$(1) := $$(S)src/external/$(1)/src/lib.rs +RUSTFLAGS_$(1) += --crate-type rlib,dylib --crate-name $(1) --cfg rust_build +RSINPUTS_$(1) := $$(call rwildcard,$(S)src/external/$(1)/src/,*.rs) +endef + +$(foreach crate,$(EXTERNAL_CRATES),$(eval $(call EXTERNAL_CRATE,$(crate)))) + +# Build the dependencies array for all crates +# +# $(1) is the crate to generate variables for +define BUILD_CRATE_DEPS RUST_DEPS_$(1) := $$(filter-out native:%,$$(DEPS_$(1))) NATIVE_DEPS_$(1) := $$(patsubst native:%,%,$$(filter native:%,$$(DEPS_$(1)))) endef - -$(foreach crate,$(CRATES),$(eval $(call RUST_CRATE,$(crate)))) +$(foreach crate,$(CRATES),$(eval $(call BUILD_CRATE_DEPS,$(crate)))) # Similar to the macro above for crates, this macro is for tools # diff --git a/mk/dist.mk b/mk/dist.mk index 0fc9100b85b92..0ceb627dee300 100644 --- a/mk/dist.mk +++ b/mk/dist.mk @@ -53,7 +53,9 @@ PKG_FILES := \ driver \ etc \ error-index-generator \ - $(foreach crate,$(CRATES),lib$(crate)) \ + external \ + $(foreach crate,$(TARGET_CRATES),lib$(crate)) \ + $(foreach crate,$(HOST_CRATES),lib$(crate)) \ libcollectionstest \ libcoretest \ libbacktrace \ diff --git a/src/compiletest/compiletest.rs b/src/compiletest/compiletest.rs index 9d575675cc8c9..d04a6720413fe 100644 --- a/src/compiletest/compiletest.rs +++ b/src/compiletest/compiletest.rs @@ -31,7 +31,6 @@ extern crate log; use std::env; use std::fs; use std::path::{Path, PathBuf}; -use getopts::{optopt, optflag, reqopt}; use common::Config; use common::{Pretty, DebugInfoGdb, DebugInfoLldb}; use util::logv; @@ -55,60 +54,60 @@ pub fn main() { run_tests(&config); } -pub fn parse_config(args: Vec ) -> Config { - - let groups : Vec = - vec!(reqopt("", "compile-lib-path", "path to host shared libraries", "PATH"), - reqopt("", "run-lib-path", "path to target shared libraries", "PATH"), - reqopt("", "rustc-path", "path to rustc to use for compiling", "PATH"), - reqopt("", "rustdoc-path", "path to rustdoc to use for compiling", "PATH"), - reqopt("", "python", "path to python to use for doc tests", "PATH"), - optopt("", "valgrind-path", "path to Valgrind executable for Valgrind tests", "PROGRAM"), - optflag("", "force-valgrind", "fail if Valgrind tests cannot be run under Valgrind"), - optopt("", "llvm-bin-path", "path to directory holding llvm binaries", "DIR"), - reqopt("", "src-base", "directory to scan for test files", "PATH"), - reqopt("", "build-base", "directory to deposit test outputs", "PATH"), - reqopt("", "aux-base", "directory to find auxiliary test files", "PATH"), - reqopt("", "stage-id", "the target-stage identifier", "stageN-TARGET"), - reqopt("", "mode", "which sort of compile tests to run", - "(compile-fail|parse-fail|run-fail|run-pass|run-pass-valgrind|pretty|debug-info)"), - optflag("", "ignored", "run tests marked as ignored"), - optopt("", "runtool", "supervisor program to run tests under \ - (eg. 
emulator, valgrind)", "PROGRAM"), - optopt("", "host-rustcflags", "flags to pass to rustc for host", "FLAGS"), - optopt("", "target-rustcflags", "flags to pass to rustc for target", "FLAGS"), - optflag("", "verbose", "run tests verbosely, showing all output"), - optopt("", "logfile", "file to log test execution to", "FILE"), - optflag("", "jit", "run tests under the JIT"), - optopt("", "target", "the target to build for", "TARGET"), - optopt("", "host", "the host to build for", "HOST"), - optopt("", "gdb-version", "the version of GDB used", "VERSION STRING"), - optopt("", "lldb-version", "the version of LLDB used", "VERSION STRING"), - optopt("", "android-cross-path", "Android NDK standalone path", "PATH"), - optopt("", "adb-path", "path to the android debugger", "PATH"), - optopt("", "adb-test-dir", "path to tests for the android debugger", "PATH"), - optopt("", "lldb-python-dir", "directory containing LLDB's python module", "PATH"), - optflag("h", "help", "show this message")); +pub fn parse_config(args: Vec) -> Config { + + let mut options = getopts::Options::new(); + options + .reqopt("", "compile-lib-path", "path to host shared libraries", "PATH") + .reqopt("", "run-lib-path", "path to target shared libraries", "PATH") + .reqopt("", "rustc-path", "path to rustc to use for compiling", "PATH") + .reqopt("", "rustdoc-path", "path to rustdoc to use for compiling", "PATH") + .reqopt("", "python", "path to python to use for doc tests", "PATH") + .optopt("", "valgrind-path", "path to Valgrind executable for Valgrind tests", "PROGRAM") + .optflag("", "force-valgrind", "fail if Valgrind tests cannot be run under Valgrind") + .optopt("", "llvm-bin-path", "path to directory holding llvm binaries", "DIR") + .reqopt("", "src-base", "directory to scan for test files", "PATH") + .reqopt("", "build-base", "directory to deposit test outputs", "PATH") + .reqopt("", "aux-base", "directory to find auxiliary test files", "PATH") + .reqopt("", "stage-id", "the target-stage identifier", "stageN-TARGET") + .reqopt("", "mode", "which sort of compile tests to run", + "(compile-fail|parse-fail|run-fail|run-pass|run-pass-valgrind|pretty|debug-info)") + .optflag("", "ignored", "run tests marked as ignored") + .optopt("", "runtool", "supervisor program to run tests under \ + (eg. 
emulator, valgrind)", "PROGRAM") + .optopt("", "host-rustcflags", "flags to pass to rustc for host", "FLAGS") + .optopt("", "target-rustcflags", "flags to pass to rustc for target", "FLAGS") + .optflag("", "verbose", "run tests verbosely, showing all output") + .optopt("", "logfile", "file to log test execution to", "FILE") + .optflag("", "jit", "run tests under the JIT") + .optopt("", "target", "the target to build for", "TARGET") + .optopt("", "host", "the host to build for", "HOST") + .optopt("", "gdb-version", "the version of GDB used", "VERSION STRING") + .optopt("", "lldb-version", "the version of LLDB used", "VERSION STRING") + .optopt("", "android-cross-path", "Android NDK standalone path", "PATH") + .optopt("", "adb-path", "path to the android debugger", "PATH") + .optopt("", "adb-test-dir", "path to tests for the android debugger", "PATH") + .optopt("", "lldb-python-dir", "directory containing LLDB's python module", "PATH") + .optflag("h", "help", "show this message"); assert!(!args.is_empty()); let argv0 = args[0].clone(); let args_ = args.tail(); if args[1] == "-h" || args[1] == "--help" { let message = format!("Usage: {} [OPTIONS] [TESTNAME...]", argv0); - println!("{}", getopts::usage(&message, &groups)); + println!("{}", options.usage(&message)); println!(""); panic!() } - let matches = - &match getopts::getopts(args_, &groups) { - Ok(m) => m, - Err(f) => panic!("{:?}", f) - }; + let matches = &match options.parse(args_) { + Ok(m) => m, + Err(f) => panic!("{:?}", f) + }; if matches.opt_present("h") || matches.opt_present("help") { let message = format!("Usage: {} [OPTIONS] [TESTNAME...]", argv0); - println!("{}", getopts::usage(&message, &groups)); + println!("{}", options.usage(&message)); println!(""); panic!() } diff --git a/src/etc/tidy.py b/src/etc/tidy.py index 9f5f919bce8d8..8fbaa2e0bf0d0 100644 --- a/src/etc/tidy.py +++ b/src/etc/tidy.py @@ -108,6 +108,7 @@ def interesting_file(f): 'src/rustllvm', 'src/rt/valgrind', 'src/rt/msvc', + 'src/external', 'src/rust-installer' } diff --git a/src/external/README.md b/src/external/README.md new file mode 100644 index 0000000000000..48913687644e6 --- /dev/null +++ b/src/external/README.md @@ -0,0 +1,42 @@ +## External crates + +All crates in this directory are hosted externally from this repository and are +imported via the standard `git-subtree` command. These crates **should not** be +edited directly, but instead changes to should go upstream and then be pulled +into these crates. + +Crates here are listed in the `EXTERNAL_CRATES` array in `mk/crates.mk` and are +built via the standard build system. + +### Adding a new external crate + +1. Make sure the crate has the appropriate `#![cfg_attr]` annotations to make + the crate unstable in the distribution with a message pointing to crates.io. + See the existing crates in the `src/external` folder for examples. + +2. To add a new crate `foo` to this folder, first execute the following: + + ```sh + git subtree add -P src/external/foo https://github.com/bar/foo master --squash + ``` + + This will check out the crate into this folder, squashing the entire history + into one commit (the rust-lang/rust repo doesn't need the whole history of the + crate). + +3. Next, edit `mk/crates.mk` appropriately by modifying `EXTERNAL_CRATES` and + possibly some other crates and/or dependency lists. + +4. Add the crate to `src/test/compile-fail-fulldeps/unstable-crates.rs` to + ensure that it is unstable in the distribution. 
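+
+As a rough sketch of step 3 (the crate name `foo` and its dependency list here
+are hypothetical), the `mk/crates.mk` edit typically boils down to adding the
+crate to `EXTERNAL_CRATES` and giving it a `DEPS_` entry:
+
+```make
+EXTERNAL_CRATES := term rustc_serialize getopts foo
+
+DEPS_foo := std
+```
+
+Any in-tree crate that uses `foo` also needs `foo` added to its own `DEPS_`
+list.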
+ +### Updating an external crate + +To pull in upstream changes to a library `foo`, execute the following + +```sh +git subtree pull -P src/external/foo https://github.com/bar/foo master --squash +``` + +Similar to the addition process the `--squash` argument is provided to squash +all changes into one commit. diff --git a/src/librbml/lib.rs b/src/librbml/lib.rs index 41ae0f2d5e203..33ab2cedc01e9 100644 --- a/src/librbml/lib.rs +++ b/src/librbml/lib.rs @@ -129,7 +129,7 @@ #![cfg_attr(test, feature(test))] -extern crate serialize; +extern crate rustc_serialize as serialize; #[macro_use] extern crate log; #[cfg(test)] extern crate test; @@ -641,7 +641,7 @@ pub mod reader { fn read_u32(&mut self) -> DecodeResult { Ok(try!(self._next_int(EsU8, EsU32)) as u32) } fn read_u16(&mut self) -> DecodeResult { Ok(try!(self._next_int(EsU8, EsU16)) as u16) } fn read_u8(&mut self) -> DecodeResult { Ok(doc_as_u8(try!(self.next_doc(EsU8)))) } - fn read_uint(&mut self) -> DecodeResult { + fn read_usize(&mut self) -> DecodeResult { let v = try!(self._next_int(EsU8, EsU64)); if v > (::std::usize::MAX as u64) { Err(IntTooBig(v as usize)) @@ -654,7 +654,7 @@ pub mod reader { fn read_i32(&mut self) -> DecodeResult { Ok(try!(self._next_int(EsI8, EsI32)) as i32) } fn read_i16(&mut self) -> DecodeResult { Ok(try!(self._next_int(EsI8, EsI16)) as i16) } fn read_i8(&mut self) -> DecodeResult { Ok(doc_as_u8(try!(self.next_doc(EsI8))) as i8) } - fn read_int(&mut self) -> DecodeResult { + fn read_isize(&mut self) -> DecodeResult { let v = try!(self._next_int(EsI8, EsI64)) as i64; if v > (isize::MAX as i64) || v < (isize::MIN as i64) { debug!("FIXME \\#6122: Removing this makes this function miscompile"); @@ -1125,7 +1125,7 @@ pub mod writer { Ok(()) } - fn emit_uint(&mut self, v: usize) -> EncodeResult { + fn emit_usize(&mut self, v: usize) -> EncodeResult { self.emit_u64(v as u64) } fn emit_u64(&mut self, v: u64) -> EncodeResult { @@ -1153,7 +1153,7 @@ pub mod writer { self.wr_tagged_raw_u8(EsU8 as usize, v) } - fn emit_int(&mut self, v: isize) -> EncodeResult { + fn emit_isize(&mut self, v: isize) -> EncodeResult { self.emit_i64(v as i64) } fn emit_i64(&mut self, v: i64) -> EncodeResult { diff --git a/src/librustc/lib.rs b/src/librustc/lib.rs index 4a715ca621cc2..0eac502fd1c8c 100644 --- a/src/librustc/lib.rs +++ b/src/librustc/lib.rs @@ -58,15 +58,13 @@ extern crate libc; extern crate rustc_llvm; extern crate rustc_back; extern crate rustc_data_structures; -extern crate serialize; +extern crate rustc_serialize; extern crate rbml; extern crate collections; #[macro_use] extern crate log; #[macro_use] extern crate syntax; #[macro_use] #[no_link] extern crate rustc_bitflags; -extern crate serialize as rustc_serialize; // used by deriving - #[cfg(test)] extern crate test; diff --git a/src/librustc/metadata/decoder.rs b/src/librustc/metadata/decoder.rs index be285d975b81d..b7fe911f8f547 100644 --- a/src/librustc/metadata/decoder.rs +++ b/src/librustc/metadata/decoder.rs @@ -43,7 +43,7 @@ use std::str; use rbml::reader; use rbml; -use serialize::Decodable; +use rustc_serialize::Decodable; use syntax::ast_map; use syntax::attr; use syntax::parse::token::{IdentInterner, special_idents}; diff --git a/src/librustc/metadata/encoder.rs b/src/librustc/metadata/encoder.rs index 8eefb4d5011d2..d35abdeff1fbb 100644 --- a/src/librustc/metadata/encoder.rs +++ b/src/librustc/metadata/encoder.rs @@ -27,7 +27,7 @@ use middle::ty::{self, Ty}; use middle::stability; use util::nodemap::{FnvHashMap, NodeMap, NodeSet}; -use 
serialize::Encodable; +use rustc_serialize::Encodable; use std::cell::RefCell; use std::hash::{Hash, Hasher, SipHasher}; use std::io::prelude::*; diff --git a/src/librustc/middle/astencode.rs b/src/librustc/middle/astencode.rs index fda57c9dc610a..262ef8a064322 100644 --- a/src/librustc/middle/astencode.rs +++ b/src/librustc/middle/astencode.rs @@ -47,9 +47,9 @@ use std::fmt::Debug; use rbml::reader; use rbml::writer::Encoder; use rbml; -use serialize; -use serialize::{Decodable, Decoder, DecoderHelpers, Encodable}; -use serialize::EncoderHelpers; +use rustc_serialize; +use rustc_serialize::{Decodable, Decoder, DecoderHelpers, Encodable}; +use rustc_serialize::EncoderHelpers; #[cfg(test)] use std::io::Cursor; #[cfg(test)] use syntax::parse; @@ -313,8 +313,8 @@ trait def_id_encoder_helpers { fn emit_def_id(&mut self, did: ast::DefId); } -impl def_id_encoder_helpers for S - where ::Error: Debug +impl def_id_encoder_helpers for S + where ::Error: Debug { fn emit_def_id(&mut self, did: ast::DefId) { did.encode(self).unwrap() @@ -327,8 +327,8 @@ trait def_id_decoder_helpers { cdata: &cstore::crate_metadata) -> ast::DefId; } -impl def_id_decoder_helpers for D - where ::Error: Debug +impl def_id_decoder_helpers for D + where ::Error: Debug { fn read_def_id(&mut self, dcx: &DecodeContext) -> ast::DefId { let did: ast::DefId = Decodable::decode(self).unwrap(); @@ -615,7 +615,7 @@ fn encode_method_callee<'a, 'tcx>(ecx: &e::EncodeContext<'a, 'tcx>, rbml_w: &mut Encoder, autoderef: u32, method: &MethodCallee<'tcx>) { - use serialize::Encoder; + use rustc_serialize::Encoder; rbml_w.emit_struct("MethodCallee", 4, |rbml_w| { rbml_w.emit_struct_field("autoderef", 0, |rbml_w| { @@ -782,7 +782,7 @@ impl<'a, 'tcx> rbml_writer_helpers<'tcx> for Encoder<'a> { ecx: &e::EncodeContext<'b, 'tcx>, method_origin: &ty::MethodOrigin<'tcx>) { - use serialize::Encoder; + use rustc_serialize::Encoder; self.emit_enum("MethodOrigin", |this| { match *method_origin { @@ -805,7 +805,7 @@ impl<'a, 'tcx> rbml_writer_helpers<'tcx> for Encoder<'a> { Ok(this.emit_trait_ref(ecx, &p.trait_ref)) })); try!(this.emit_struct_field("method_num", 0, |this| { - this.emit_uint(p.method_num) + this.emit_usize(p.method_num) })); try!(this.emit_struct_field("impl_def_id", 0, |this| { this.emit_option(|this| { @@ -832,10 +832,10 @@ impl<'a, 'tcx> rbml_writer_helpers<'tcx> for Encoder<'a> { Ok(this.emit_def_id(o.object_trait_id)) })); try!(this.emit_struct_field("method_num", 0, |this| { - this.emit_uint(o.method_num) + this.emit_usize(o.method_num) })); try!(this.emit_struct_field("vtable_index", 0, |this| { - this.emit_uint(o.vtable_index) + this.emit_usize(o.vtable_index) })); Ok(()) }) @@ -879,7 +879,7 @@ impl<'a, 'tcx> rbml_writer_helpers<'tcx> for Encoder<'a> { fn emit_type_scheme<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>, type_scheme: ty::TypeScheme<'tcx>) { - use serialize::Encoder; + use rustc_serialize::Encoder; self.emit_struct("TypeScheme", 2, |this| { this.emit_struct_field("generics", 0, |this| { @@ -924,7 +924,7 @@ impl<'a, 'tcx> rbml_writer_helpers<'tcx> for Encoder<'a> { fn emit_auto_adjustment<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>, adj: &ty::AutoAdjustment<'tcx>) { - use serialize::Encoder; + use rustc_serialize::Encoder; self.emit_enum("AutoAdjustment", |this| { match *adj { @@ -949,7 +949,7 @@ impl<'a, 'tcx> rbml_writer_helpers<'tcx> for Encoder<'a> { } fn emit_autoref<'b>(&mut self, autoref: &ty::AutoRef<'tcx>) { - use serialize::Encoder; + use rustc_serialize::Encoder; self.emit_enum("AutoRef", |this| { 
match autoref { @@ -970,7 +970,7 @@ impl<'a, 'tcx> rbml_writer_helpers<'tcx> for Encoder<'a> { fn emit_auto_deref_ref<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>, auto_deref_ref: &ty::AutoDerefRef<'tcx>) { - use serialize::Encoder; + use rustc_serialize::Encoder; self.emit_struct("AutoDerefRef", 2, |this| { this.emit_struct_field("autoderefs", 0, |this| auto_deref_ref.autoderefs.encode(this)); @@ -1296,7 +1296,7 @@ impl<'a, 'tcx> rbml_decoder_decoder_helpers<'tcx> for reader::Decoder<'a> { }, method_num: { this.read_struct_field("method_num", 1, |this| { - this.read_uint() + this.read_usize() }).unwrap() }, impl_def_id: { @@ -1330,12 +1330,12 @@ impl<'a, 'tcx> rbml_decoder_decoder_helpers<'tcx> for reader::Decoder<'a> { }, method_num: { this.read_struct_field("method_num", 2, |this| { - this.read_uint() + this.read_usize() }).unwrap() }, vtable_index: { this.read_struct_field("vtable_index", 3, |this| { - this.read_uint() + this.read_usize() }).unwrap() }, })) diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs index 48fe574e71f48..43103b226c11b 100644 --- a/src/librustc/session/config.rs +++ b/src/librustc/session/config.rs @@ -688,162 +688,80 @@ pub fn build_target_config(opts: &Options, sp: &SpanHandler) -> Config { } /// Returns the "short" subset of the stable rustc command line options. -pub fn short_optgroups() -> Vec { - rustc_short_optgroups().into_iter() - .filter(|g|g.is_stable()) - .map(|g|g.opt_group) - .collect() -} - -/// Returns all of the stable rustc command line options. -pub fn optgroups() -> Vec { - rustc_optgroups().into_iter() - .filter(|g|g.is_stable()) - .map(|g|g.opt_group) - .collect() -} - -#[derive(Copy, Clone, PartialEq, Eq, Debug)] -pub enum OptionStability { Stable, Unstable } - -#[derive(Clone, PartialEq, Eq)] -pub struct RustcOptGroup { - pub opt_group: getopts::OptGroup, - pub stability: OptionStability, -} - -impl RustcOptGroup { - pub fn is_stable(&self) -> bool { - self.stability == OptionStability::Stable - } - - fn stable(g: getopts::OptGroup) -> RustcOptGroup { - RustcOptGroup { opt_group: g, stability: OptionStability::Stable } - } - - fn unstable(g: getopts::OptGroup) -> RustcOptGroup { - RustcOptGroup { opt_group: g, stability: OptionStability::Unstable } - } -} - -// The `opt` local module holds wrappers around the `getopts` API that -// adds extra rustc-specific metadata to each option; such metadata -// is exposed by . The public -// functions below ending with `_u` are the functions that return -// *unstable* options, i.e. options that are only enabled when the -// user also passes the `-Z unstable-options` debugging flag. -mod opt { - // The `fn opt_u` etc below are written so that we can use them - // in the future; do not warn about them not being used right now. - #![allow(dead_code)] - - use getopts; - use super::RustcOptGroup; - - pub type R = RustcOptGroup; - pub type S<'a> = &'a str; - - fn stable(g: getopts::OptGroup) -> R { RustcOptGroup::stable(g) } - fn unstable(g: getopts::OptGroup) -> R { RustcOptGroup::unstable(g) } - - // FIXME (pnkfelix): We default to stable since the current set of - // options is defacto stable. However, it would be good to revise the - // code so that a stable option is the thing that takes extra effort - // to encode. 
- - pub fn opt(a: S, b: S, c: S, d: S) -> R { stable(getopts::optopt(a, b, c, d)) } - pub fn multi(a: S, b: S, c: S, d: S) -> R { stable(getopts::optmulti(a, b, c, d)) } - pub fn flag(a: S, b: S, c: S) -> R { stable(getopts::optflag(a, b, c)) } - pub fn flagopt(a: S, b: S, c: S, d: S) -> R { stable(getopts::optflagopt(a, b, c, d)) } - pub fn flagmulti(a: S, b: S, c: S) -> R { stable(getopts::optflagmulti(a, b, c)) } - - - pub fn opt_u(a: S, b: S, c: S, d: S) -> R { unstable(getopts::optopt(a, b, c, d)) } - pub fn multi_u(a: S, b: S, c: S, d: S) -> R { unstable(getopts::optmulti(a, b, c, d)) } - pub fn flag_u(a: S, b: S, c: S) -> R { unstable(getopts::optflag(a, b, c)) } - pub fn flagopt_u(a: S, b: S, c: S, d: S) -> R { unstable(getopts::optflagopt(a, b, c, d)) } - pub fn flagmulti_u(a: S, b: S, c: S) -> R { unstable(getopts::optflagmulti(a, b, c)) } -} - -/// Returns the "short" subset of the rustc command line options, -/// including metadata for each option, such as whether the option is -/// part of the stable long-term interface for rustc. -pub fn rustc_short_optgroups() -> Vec { - vec![ - opt::flag("h", "help", "Display this message"), - opt::multi("", "cfg", "Configure the compilation environment", "SPEC"), - opt::multi("L", "", "Add a directory to the library search path", - "[KIND=]PATH"), - opt::multi("l", "", "Link the generated crate(s) to the specified native +pub fn short_optgroups(options: &mut getopts::Options) { + options.optflag("h", "help", "Display this message") + .optmulti("", "cfg", "Configure the compilation environment", "SPEC") + .optmulti("L", "", "Add a directory to the library search path", + "[KIND=]PATH") + .optmulti("l", "", "Link the generated crate(s) to the specified native library NAME. The optional KIND can be one of, static, dylib, or framework. 
If omitted, dylib is - assumed.", "[KIND=]NAME"), - opt::multi("", "crate-type", "Comma separated list of types of crates + assumed.", "[KIND=]NAME") + .optmulti("", "crate-type", "Comma separated list of types of crates for the compiler to emit", - "[bin|lib|rlib|dylib|staticlib]"), - opt::opt("", "crate-name", "Specify the name of the crate being built", - "NAME"), - opt::multi("", "emit", "Comma separated list of types of output for \ + "[bin|lib|rlib|dylib|staticlib]") + .optopt("", "crate-name", "Specify the name of the crate being built", + "NAME") + .optmulti("", "emit", "Comma separated list of types of output for \ the compiler to emit", - "[asm|llvm-bc|llvm-ir|obj|link|dep-info]"), - opt::multi("", "print", "Comma separated list of compiler information to \ + "[asm|llvm-bc|llvm-ir|obj|link|dep-info]") + .optmulti("", "print", "Comma separated list of compiler information to \ print on stdout", - "[crate-name|file-names|sysroot]"), - opt::flagmulti("g", "", "Equivalent to -C debuginfo=2"), - opt::flagmulti("O", "", "Equivalent to -C opt-level=2"), - opt::opt("o", "", "Write output to ", "FILENAME"), - opt::opt("", "out-dir", "Write output to compiler-chosen filename \ - in ", "DIR"), - opt::opt("", "explain", "Provide a detailed explanation of an error \ - message", "OPT"), - opt::flag("", "test", "Build a test harness"), - opt::opt("", "target", "Target triple cpu-manufacturer-kernel[-os] \ + "[crate-name|file-names|sysroot]") + .optflagmulti("g", "", "Equivalent to -C debuginfo=2") + .optflagmulti("O", "", "Equivalent to -C opt-level=2") + .optopt("o", "", "Write output to ", "FILENAME") + .optopt("", "out-dir", "Write output to compiler-chosen filename \ + in ", "DIR") + .optopt("", "explain", "Provide a detailed explanation of an error \ + message", "OPT") + .optflag("", "test", "Build a test harness") + .optopt("", "target", "Target triple cpu-manufacturer-kernel[-os] \ to compile for (see chapter 3.4 of \ http://www.sourceware.org/autobook/ for details)", - "TRIPLE"), - opt::multi("W", "warn", "Set lint warnings", "OPT"), - opt::multi("A", "allow", "Set lint allowed", "OPT"), - opt::multi("D", "deny", "Set lint denied", "OPT"), - opt::multi("F", "forbid", "Set lint forbidden", "OPT"), - opt::multi("C", "codegen", "Set a codegen option", "OPT[=VALUE]"), - opt::flag("V", "version", "Print version info and exit"), - opt::flag("v", "verbose", "Use verbose output"), - ] + "TRIPLE") + .optmulti("W", "warn", "Set lint warnings", "OPT") + .optmulti("A", "allow", "Set lint allowed", "OPT") + .optmulti("D", "deny", "Set lint denied", "OPT") + .optmulti("F", "forbid", "Set lint forbidden", "OPT") + .optmulti("C", "codegen", "Set a codegen option", "OPT[=VALUE]") + .optflag("V", "version", "Print version info and exit") + .optflag("v", "verbose", "Use verbose output"); } -/// Returns all rustc command line options, including metadata for -/// each option, such as whether the option is part of the stable -/// long-term interface for rustc. -pub fn rustc_optgroups() -> Vec { - let mut opts = rustc_short_optgroups(); - opts.push_all(&[ - opt::multi("", "extern", "Specify where an external rust library is \ +/// Returns all of the stable rustc command line options. 
+pub fn optgroups(options: &mut getopts::Options, unstable: bool) { + short_optgroups(options); + options + .optmulti("", "extern", "Specify where an external rust library is \ located", - "NAME=PATH"), - opt::opt("", "sysroot", "Override the system root", "PATH"), - opt::multi("Z", "", "Set internal debugging options", "FLAG"), - opt::opt("", "color", "Configure coloring of output: + "NAME=PATH") + .optopt("", "sysroot", "Override the system root", "PATH") + .optmulti("Z", "", "Set internal debugging options", "FLAG") + .optopt("", "color", "Configure coloring of output: auto = colorize, if output goes to a tty (default); always = always colorize output; - never = never colorize output", "auto|always|never"), - - opt::flagopt_u("", "pretty", - "Pretty-print the input instead of compiling; - valid types are: `normal` (un-annotated source), - `expanded` (crates expanded), - `typed` (crates expanded, with type annotations), or - `expanded,identified` (fully parenthesized, AST nodes with IDs).", - "TYPE"), - opt::flagopt_u("", "xpretty", - "Pretty-print the input instead of compiling, unstable variants; - valid types are any of the types for `--pretty`, as well as: - `flowgraph=` (graphviz formatted flowgraph for node), or - `everybody_loops` (all function bodies replaced with `loop {}`).", - "TYPE"), - opt::opt_u("", "show-span", "Show spans for compiler debugging", "expr|pat|ty"), - ]); - opts + never = never colorize output", "auto|always|never"); + + if unstable { + options + .optflagopt("", "pretty", + "Pretty-print the input instead of compiling; + valid types are: `normal` (un-annotated source) + `expanded` (crates expanded) + `typed` (crates expanded, with type annotations), or + `expanded,identified` (fully parenthesized, AST nodes with IDs).", + "TYPE") + .optflagopt("", "xpretty", + "Pretty-print the input instead of compiling, unstable variants; + valid types are any of the types for `--pretty`, as well as: + `flowgraph=` (graphviz formatted flowgraph for node), or + `everybody_loops` (all function bodies replaced with `loop {}`).", + "TYPE") + .optopt("", "show-span", "Show spans for compiler debugging", + "expr|pat|ty"); + } } // Convert strings provided as --cfg [cfgspec] into a crate_cfg @@ -1120,7 +1038,7 @@ mod tests { use session::config::{build_configuration, optgroups, build_session_options}; use session::build_session; - use getopts::getopts; + use getopts; use syntax::attr; use syntax::attr::AttrMetaMethods; use syntax::diagnostics; @@ -1128,11 +1046,12 @@ mod tests { // When the user supplies --test we should implicitly supply --cfg test #[test] fn test_switch_implies_cfg_test() { - let matches = - &match getopts(&["--test".to_string()], &optgroups()) { - Ok(m) => m, - Err(f) => panic!("test_switch_implies_cfg_test: {}", f) - }; + let mut options = getopts::Options::new(); + optgroups(&mut options, false); + let matches = &match options.parse(&["--test"]) { + Ok(m) => m, + Err(f) => panic!("test_switch_implies_cfg_test: {}", f) + }; let registry = diagnostics::registry::Registry::new(&[]); let sessopts = build_session_options(matches); let sess = build_session(sessopts, None, registry); @@ -1144,14 +1063,14 @@ mod tests { // another --cfg test #[test] fn test_switch_implies_cfg_test_unless_cfg_test() { - let matches = - &match getopts(&["--test".to_string(), "--cfg=test".to_string()], - &optgroups()) { - Ok(m) => m, - Err(f) => { + let mut options = getopts::Options::new(); + optgroups(&mut options, false); + let matches = &match options.parse(&["--test", 
"--cfg=test"]) { + Ok(m) => m, + Err(f) => { panic!("test_switch_implies_cfg_test_unless_cfg_test: {}", f) - } - }; + } + }; let registry = diagnostics::registry::Registry::new(&[]); let sessopts = build_session_options(matches); let sess = build_session(sessopts, None, registry); @@ -1163,10 +1082,10 @@ mod tests { #[test] fn test_can_print_warnings() { + let mut options = getopts::Options::new(); + optgroups(&mut options, false); { - let matches = getopts(&[ - "-Awarnings".to_string() - ], &optgroups()).unwrap(); + let matches = options.parse(&[ "-Awarnings" ]).unwrap(); let registry = diagnostics::registry::Registry::new(&[]); let sessopts = build_session_options(&matches); let sess = build_session(sessopts, None, registry); @@ -1174,10 +1093,10 @@ mod tests { } { - let matches = getopts(&[ - "-Awarnings".to_string(), - "-Dwarnings".to_string() - ], &optgroups()).unwrap(); + let matches = options.parse(&[ + "-Awarnings", + "-Dwarnings", + ]).unwrap(); let registry = diagnostics::registry::Registry::new(&[]); let sessopts = build_session_options(&matches); let sess = build_session(sessopts, None, registry); @@ -1185,9 +1104,9 @@ mod tests { } { - let matches = getopts(&[ - "-Adead_code".to_string() - ], &optgroups()).unwrap(); + let matches = options.parse(&[ + "-Adead_code", + ]).unwrap(); let registry = diagnostics::registry::Registry::new(&[]); let sessopts = build_session_options(&matches); let sess = build_session(sessopts, None, registry); diff --git a/src/librustc_back/lib.rs b/src/librustc_back/lib.rs index 7d46cc84fd685..f9cef0547fe92 100644 --- a/src/librustc_back/lib.rs +++ b/src/librustc_back/lib.rs @@ -46,7 +46,7 @@ extern crate syntax; extern crate libc; -extern crate serialize; +extern crate rustc_serialize; extern crate rustc_llvm; #[macro_use] extern crate log; diff --git a/src/librustc_back/sha2.rs b/src/librustc_back/sha2.rs index 9ed827da8b2e4..d030c1eccefec 100644 --- a/src/librustc_back/sha2.rs +++ b/src/librustc_back/sha2.rs @@ -14,7 +14,7 @@ use std::iter::repeat; use std::slice::bytes::{MutableByteVector, copy_memory}; -use serialize::hex::ToHex; +use rustc_serialize::hex::ToHex; /// Write a u32 into a vector, which must be 4 bytes long. The value is written in big-endian /// format. @@ -532,7 +532,7 @@ mod tests { use self::rand::Rng; use self::rand::isaac::IsaacRng; - use serialize::hex::FromHex; + use rustc_serialize::hex::FromHex; use std::iter::repeat; use std::u64; use super::{Digest, Sha256, FixedBuffer}; diff --git a/src/librustc_back/target/mod.rs b/src/librustc_back/target/mod.rs index 22d966014da1c..ad2a65c1c0e85 100644 --- a/src/librustc_back/target/mod.rs +++ b/src/librustc_back/target/mod.rs @@ -45,7 +45,7 @@ //! settings, though `target-feature` and `link-args` will *add* to the list //! specified by the target, rather than replace. 
-use serialize::json::Json; +use rustc_serialize::json::Json; use std::default::Default; use std::io::prelude::*; use syntax::{diagnostic, abi}; @@ -310,14 +310,12 @@ impl Target { use std::ffi::OsString; use std::fs::File; use std::path::{Path, PathBuf}; - use serialize::json; fn load_file(path: &Path) -> Result { let mut f = try!(File::open(path).map_err(|e| e.to_string())); - let mut contents = Vec::new(); - try!(f.read_to_end(&mut contents).map_err(|e| e.to_string())); - let obj = try!(json::from_reader(&mut &contents[..]) - .map_err(|e| e.to_string())); + let mut contents = String::new(); + try!(f.read_to_string(&mut contents).map_err(|e| e.to_string())); + let obj = try!(contents.parse::().map_err(|e| e.to_string())); Ok(Target::from_json(obj)) } diff --git a/src/librustc_data_structures/lib.rs b/src/librustc_data_structures/lib.rs index 1f8f7694ff90d..9cbe7a5376ed2 100644 --- a/src/librustc_data_structures/lib.rs +++ b/src/librustc_data_structures/lib.rs @@ -31,7 +31,7 @@ #![cfg_attr(test, feature(test))] #[macro_use] extern crate log; -extern crate serialize as rustc_serialize; // used by deriving +extern crate rustc_serialize; pub mod snapshot_vec; pub mod graph; diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index 13dec30e0a016..3b7f5834cc179 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -29,7 +29,7 @@ use rustc_typeck as typeck; use rustc_privacy; use super::Compilation; -use serialize::json; +use rustc_serialize::json; use std::env; use std::ffi::OsString; diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index 49879b472feb7..07faf2825e428 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -48,7 +48,7 @@ extern crate rustc_privacy; extern crate rustc_resolve; extern crate rustc_trans; extern crate rustc_typeck; -extern crate serialize; +extern crate rustc_serialize; extern crate rustc_llvm as llvm; #[macro_use] extern crate log; #[macro_use] extern crate syntax; @@ -500,15 +500,12 @@ pub fn version(binary: &str, matches: &getopts::Matches) { } fn usage(verbose: bool, include_unstable_options: bool) { - let groups = if verbose { - config::rustc_optgroups() + let mut opts = getopts::Options::new(); + if verbose { + config::optgroups(&mut opts, include_unstable_options) } else { - config::rustc_short_optgroups() - }; - let groups : Vec<_> = groups.into_iter() - .filter(|x| include_unstable_options || x.is_stable()) - .map(|x|x.opt_group) - .collect(); + config::short_optgroups(&mut opts) + } let message = format!("Usage: rustc [OPTIONS] INPUT"); let extra_help = if verbose { "" @@ -520,7 +517,7 @@ Additional help: -C help Print codegen options -W help Print 'lint' options and default settings -Z help Print internal options for debugging rustc{}\n", - getopts::usage(&message, &groups), + opts.usage(&message), extra_help); } @@ -683,49 +680,25 @@ pub fn handle_options(mut args: Vec) -> Option { r.iter().any(|x| *x == "unstable-options") } - fn parse_all_options(args: &Vec) -> getopts::Matches { - let all_groups : Vec - = config::rustc_optgroups().into_iter().map(|x|x.opt_group).collect(); - match getopts::getopts(&args[..], &all_groups) { - Ok(m) => { - if !allows_unstable_options(&m) { - // If -Z unstable-options was not specified, verify that - // no unstable options were present. 
-                for opt in config::rustc_optgroups().into_iter().filter(|x| !x.is_stable()) {
-                    let opt_name = if !opt.opt_group.long_name.is_empty() {
-                        &opt.opt_group.long_name
-                    } else {
-                        &opt.opt_group.short_name
-                    };
-                    if m.opt_present(opt_name) {
-                        early_error(&format!("use of unstable option '{}' requires \
-                                              -Z unstable-options", opt_name));
-                    }
-                }
-            }
-            m
-        }
-        Err(f) => early_error(&f.to_string())
+    let mut stable_options = getopts::Options::new();
+    config::optgroups(&mut stable_options, false);
+    let mut unstable_options = getopts::Options::new();
+    config::optgroups(&mut unstable_options, true);
+
+    // Parse using unstable options first to make sure we can parse at all. If
+    // unstable options are enabled, then we're done, but if unstable options
+    // are not enabled, then also ensure that we can parse with stable options,
+    // generating an error if this is not the case.
+    let matches = unstable_options.parse(&args).map(|matches| {
+        if allows_unstable_options(&matches) {
+            return matches
         }
-    }
-
-    // As a speed optimization, first try to parse the command-line using just
-    // the stable options.
-    let matches = match getopts::getopts(&args[..], &config::optgroups()) {
-        Ok(ref m) if allows_unstable_options(m) => {
-            // If -Z unstable-options was specified, redo parsing with the
-            // unstable options to ensure that unstable options are defined
-            // in the returned getopts::Matches.
-            parse_all_options(&args)
-        }
-        Ok(m) => m,
-        Err(_) => {
-            // redo option parsing, including unstable options this time,
-            // in anticipation that the mishandled option was one of the
-            // unstable ones.
-            parse_all_options(&args)
-        }
-    };
+        stable_options.parse(&args).unwrap_or_else(|_| {
+            early_error("use of unstable option requires -Z unstable-options")
+        })
+    }).unwrap_or_else(|err| {
+        early_error(&err.to_string())
+    });
 
     if matches.opt_present("h") || matches.opt_present("help") {
         usage(matches.opt_present("verbose"), allows_unstable_options(&matches));
diff --git a/src/librustc_trans/back/link.rs b/src/librustc_trans/back/link.rs
index c416a9810eb0e..55208fe9889fe 100644
--- a/src/librustc_trans/back/link.rs
+++ b/src/librustc_trans/back/link.rs
@@ -36,7 +36,7 @@ use std::path::{Path, PathBuf};
 use std::process::Command;
 use std::str;
 use flate;
-use serialize::hex::ToHex;
+use rustc_serialize::hex::ToHex;
 use syntax::ast;
 use syntax::ast_map::{PathElem, PathElems, PathName};
 use syntax::attr::AttrMetaMethods;
diff --git a/src/librustc_trans/lib.rs b/src/librustc_trans/lib.rs
index f25c6eb21a47b..f1c7d6b47c77e 100644
--- a/src/librustc_trans/lib.rs
+++ b/src/librustc_trans/lib.rs
@@ -51,7 +51,7 @@ extern crate graphviz;
 extern crate libc;
 extern crate rustc;
 extern crate rustc_back;
-extern crate serialize;
+extern crate rustc_serialize;
 extern crate rustc_llvm as llvm;
 #[macro_use] extern crate log;
diff --git a/src/librustdoc/html/render.rs b/src/librustdoc/html/render.rs
index c4f2c7207ac39..4049af3810859 100644
--- a/src/librustdoc/html/render.rs
+++ b/src/librustdoc/html/render.rs
@@ -51,7 +51,7 @@ use std::sync::Arc;
 
 use externalfiles::ExternalHtml;
 
-use serialize::json::{self, ToJson};
+use rustc_serialize::json::{self, ToJson};
 use syntax::{abi, ast, ast_util, attr};
 use rustc::util::nodemap::NodeSet;
diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs
index 38cc120698431..e93ae381b6d37 100644
--- a/src/librustdoc/lib.rs
+++ b/src/librustdoc/lib.rs
@@ -44,14 +44,12 @@ extern crate rustc_driver;
 extern crate rustc_resolve;
 extern crate rustc_lint;
 extern crate rustc_back;
-extern crate
serialize; +extern crate rustc_serialize; extern crate syntax; extern crate test as testing; extern crate rustc_unicode; #[macro_use] extern crate log; -extern crate serialize as rustc_serialize; // used by deriving - use std::cell::RefCell; use std::collections::HashMap; use std::env; @@ -62,8 +60,8 @@ use std::rc::Rc; use std::sync::mpsc::channel; use externalfiles::ExternalHtml; -use serialize::Decodable; -use serialize::json::{self, Json}; +use rustc_serialize::Decodable; +use rustc_serialize::json::{self, Json}; use rustc::session::search_paths::SearchPaths; // reexported from `clean` so it can be easily updated with the mod itself @@ -134,62 +132,60 @@ pub fn main() { env::set_exit_status(res as i32); } -pub fn opts() -> Vec { - use getopts::*; - vec!( - optflag("h", "help", "show this help message"), - optflag("V", "version", "print rustdoc's version"), - optflag("v", "verbose", "use verbose output"), - optopt("r", "input-format", "the input type of the specified file", - "[rust|json]"), - optopt("w", "output-format", "the output type to write", - "[html|json]"), - optopt("o", "output", "where to place the output", "PATH"), - optopt("", "crate-name", "specify the name of this crate", "NAME"), - optmulti("L", "library-path", "directory to add to crate search path", - "DIR"), - optmulti("", "cfg", "pass a --cfg to rustc", ""), - optmulti("", "extern", "pass an --extern to rustc", "NAME=PATH"), - optmulti("", "plugin-path", "directory to load plugins from", "DIR"), - optmulti("", "passes", "list of passes to also run, you might want \ +pub fn opts(options: &mut getopts::Options) { + options + .optflag("h", "help", "show this help message") + .optflag("V", "version", "print rustdoc's version") + .optflag("v", "verbose", "use verbose output") + .optopt("r", "input-format", "the input type of the specified file", + "[rust|json]") + .optopt("w", "output-format", "the output type to write", + "[html|json]") + .optopt("o", "output", "where to place the output", "PATH") + .optopt("", "crate-name", "specify the name of this crate", "NAME") + .optmulti("L", "library-path", "directory to add to crate search path", + "DIR") + .optmulti("", "cfg", "pass a --cfg to rustc", "") + .optmulti("", "extern", "pass an --extern to rustc", "NAME=PATH") + .optmulti("", "plugin-path", "directory to load plugins from", "DIR") + .optmulti("", "passes", "list of passes to also run, you might want \ to pass it multiple times; a value of `list` \ will print available passes", - "PASSES"), - optmulti("", "plugins", "space separated list of plugins to also load", - "PLUGINS"), - optflag("", "no-defaults", "don't run the default passes"), - optflag("", "test", "run code examples as tests"), - optmulti("", "test-args", "arguments to pass to the test runner", - "ARGS"), - optopt("", "target", "target triple to document", "TRIPLE"), - optmulti("", "markdown-css", "CSS files to include via in a rendered Markdown file", - "FILES"), - optmulti("", "html-in-header", + "PASSES") + .optmulti("", "plugins", "space separated list of plugins to also load", + "PLUGINS") + .optflag("", "no-defaults", "don't run the default passes") + .optflag("", "test", "run code examples as tests") + .optmulti("", "test-args", "arguments to pass to the test runner", + "ARGS") + .optopt("", "target", "target triple to document", "TRIPLE") + .optmulti("", "markdown-css", "CSS files to include via in a rendered Markdown file", + "FILES") + .optmulti("", "html-in-header", "files to include inline in the section of a rendered Markdown file \ or 
generated documentation", - "FILES"), - optmulti("", "html-before-content", + "FILES") + .optmulti("", "html-before-content", "files to include inline between and the content of a rendered \ Markdown file or generated documentation", - "FILES"), - optmulti("", "html-after-content", + "FILES") + .optmulti("", "html-after-content", "files to include inline between the content and of a rendered \ Markdown file or generated documentation", - "FILES"), - optopt("", "markdown-playground-url", - "URL to send code snippets to", "URL"), - optflag("", "markdown-no-toc", "don't include table of contents") - ) + "FILES") + .optopt("", "markdown-playground-url", + "URL to send code snippets to", "URL") + .optflag("", "markdown-no-toc", "don't include table of contents"); } -pub fn usage(argv0: &str) { - println!("{}", - getopts::usage(&format!("{} [options] ", argv0), - &opts())); +pub fn usage(argv0: &str, options: &getopts::Options) { + println!("{}", options.usage(&format!("{} [options] ", argv0))); } pub fn main_args(args: &[String]) -> isize { - let matches = match getopts::getopts(args.tail(), &opts()) { + let mut options = getopts::Options::new(); + opts(&mut options); + let matches = match options.parse(args.tail()) { Ok(m) => m, Err(err) => { println!("{}", err); @@ -197,7 +193,7 @@ pub fn main_args(args: &[String]) -> isize { } }; if matches.opt_present("h") || matches.opt_present("help") { - usage(&args[0]); + usage(&args[0], &options); return 0; } else if matches.opt_present("version") { rustc_driver::version("rustdoc", &matches); @@ -456,12 +452,12 @@ fn rust_input(cratefile: &str, externs: core::Externs, matches: &getopts::Matche /// This input format purely deserializes the json output file. No passes are /// run over the deserialized output. fn json_input(input: &str) -> Result { - let mut bytes = Vec::new(); - match File::open(input).and_then(|mut f| f.read_to_end(&mut bytes)) { + let mut contents = String::new(); + match File::open(input).and_then(|mut f| f.read_to_string(&mut contents)) { Ok(_) => {} Err(e) => return Err(format!("couldn't open {}: {}", input, e)), }; - match json::from_reader(&mut &bytes[..]) { + match contents.parse() { Err(s) => Err(format!("{:?}", s)), Ok(Json::Object(obj)) => { let mut obj = obj; @@ -520,7 +516,7 @@ fn json_output(krate: clean::Crate, res: Vec , // FIXME #8335: yuck, Rust -> str -> JSON round trip! No way to .encode // straight to the Rust JSON representation. let crate_json_str = format!("{}", json::as_json(&krate)); - let crate_json = match json::from_str(&crate_json_str) { + let crate_json = match crate_json_str.parse() { Ok(j) => j, Err(e) => panic!("Rust generated JSON is invalid: {:?}", e) }; diff --git a/src/librustdoc/plugins.rs b/src/librustdoc/plugins.rs index d4d214f449d59..de49003436b5f 100644 --- a/src/librustdoc/plugins.rs +++ b/src/librustdoc/plugins.rs @@ -13,7 +13,7 @@ use clean; use std::dynamic_lib as dl; -use serialize::json; +use rustc_serialize::json; use std::mem; use std::string::String; use std::path::PathBuf; diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 5b03b3bf0385c..783c1585d274a 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -67,7 +67,7 @@ use ptr::P; use std::fmt; use std::rc::Rc; -use serialize::{Encodable, Decodable, Encoder, Decoder}; +use rustc_serialize::{Encodable, Decodable, Encoder, Decoder}; // FIXME #6993: in librustc, uses of "ident" should be replaced // by just "Name". 
@@ -1879,13 +1879,13 @@ pub struct MacroDef { #[cfg(test)] mod tests { - use serialize; + use rustc_serialize; use super::*; // are ASTs encodable? #[test] fn check_asts_encodable() { - fn assert_encodable() {} + fn assert_encodable() {} assert_encodable::(); } } diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index b2a366ec5beb6..8c81b940fc013 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -27,7 +27,7 @@ use std::rc::Rc; use std::{fmt, fs}; use std::io::{self, Read}; -use serialize::{Encodable, Decodable, Encoder, Decoder}; +use rustc_serialize::{Encodable, Decodable, Encoder, Decoder}; // _____________________________________________________________________________ diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs index 718bc1773fe59..53bfb5efc02ba 100644 --- a/src/libsyntax/diagnostic.rs +++ b/src/libsyntax/diagnostic.rs @@ -20,7 +20,7 @@ use std::cell::{RefCell, Cell}; use std::{cmp, error, fmt}; use std::io::prelude::*; use std::io; -use term::{self, WriterWrapper}; +use term; use libc; /// maximum number of lines we will print for each error; arbitrary. @@ -310,7 +310,7 @@ impl Level { fn print_maybe_styled(w: &mut EmitterWriter, msg: &str, - color: term::attr::Attr) -> io::Result<()> { + color: term::Attr) -> io::Result<()> { match w.dst { Terminal(ref mut t) => { try!(t.attr(color)); @@ -349,14 +349,14 @@ fn print_diagnostic(dst: &mut EmitterWriter, topic: &str, lvl: Level, try!(print_maybe_styled(dst, &format!("{}: ", lvl.to_string()), - term::attr::ForegroundColor(lvl.color()))); + term::Attr::ForegroundColor(lvl.color()))); try!(print_maybe_styled(dst, &format!("{}", msg), - term::attr::Bold)); + term::Attr::Bold)); match code { Some(code) => { - let style = term::attr::ForegroundColor(term::color::BRIGHT_MAGENTA); + let style = term::Attr::ForegroundColor(term::color::BRIGHT_MAGENTA); try!(print_maybe_styled(dst, &format!(" [{}]", code.clone()), style)); } None => () @@ -371,7 +371,7 @@ pub struct EmitterWriter { } enum Destination { - Terminal(Box + Send>), + Terminal(Box + Send>), Raw(Box), } @@ -679,7 +679,7 @@ fn highlight_lines(err: &mut EmitterWriter, try!(print_maybe_styled(err, &format!("{}\n", s), - term::attr::ForegroundColor(lvl.color()))); + term::Attr::ForegroundColor(lvl.color()))); } } Ok(()) @@ -754,7 +754,7 @@ fn end_highlight_lines(w: &mut EmitterWriter, s.push('\n'); print_maybe_styled(w, &s[..], - term::attr::ForegroundColor(lvl.color())) + term::Attr::ForegroundColor(lvl.color())) } fn print_macro_backtrace(w: &mut EmitterWriter, diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index 20f8c9707bfdd..7541a982f6c8f 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -37,14 +37,12 @@ extern crate arena; extern crate fmt_macros; -extern crate serialize; +extern crate rustc_serialize; extern crate term; extern crate libc; #[macro_use] extern crate log; #[macro_use] #[no_link] extern crate rustc_bitflags; -extern crate serialize as rustc_serialize; // used by deriving - // A variant of 'try!' that panics on Err(FatalError). This is used as a // crutch on the way towards a non-panic!-prone parser. 
It should be used // for fatal parsing errors; eventually we plan to convert all code using diff --git a/src/libsyntax/owned_slice.rs b/src/libsyntax/owned_slice.rs index 25f1f9b8480a1..eaecce4ed80d0 100644 --- a/src/libsyntax/owned_slice.rs +++ b/src/libsyntax/owned_slice.rs @@ -13,7 +13,7 @@ use std::fmt; use std::iter::{IntoIterator, FromIterator}; use std::ops::Deref; use std::vec; -use serialize::{Encodable, Decodable, Encoder, Decoder}; +use rustc_serialize::{Encodable, Decodable, Encoder, Decoder}; /// A non-growable owned slice. This is a separate type to allow the /// representation to change. diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 832fec40199b8..2ff1d7a356580 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -21,7 +21,7 @@ use ptr::P; use util::interner::{RcStr, StrInterner}; use util::interner; -use serialize::{Decodable, Decoder, Encodable, Encoder}; +use rustc_serialize::{Decodable, Decoder, Encodable, Encoder}; use std::fmt; use std::ops::Deref; use std::rc::Rc; diff --git a/src/libsyntax/ptr.rs b/src/libsyntax/ptr.rs index 83e321f110c58..e06f487e604ed 100644 --- a/src/libsyntax/ptr.rs +++ b/src/libsyntax/ptr.rs @@ -41,7 +41,7 @@ use std::hash::{Hash, Hasher}; use std::ops::Deref; use std::ptr; -use serialize::{Encodable, Decodable, Encoder, Decoder}; +use rustc_serialize::{Encodable, Decodable, Encoder, Decoder}; /// An owned smart pointer. pub struct P { diff --git a/src/libtest/lib.rs b/src/libtest/lib.rs index da86e727c6874..1de3cefffc42a 100644 --- a/src/libtest/lib.rs +++ b/src/libtest/lib.rs @@ -47,8 +47,7 @@ #![feature(duration_span)] extern crate getopts; -extern crate serialize; -extern crate serialize as rustc_serialize; +extern crate rustc_serialize; extern crate term; extern crate libc; @@ -61,8 +60,7 @@ use self::NamePadding::*; use self::OutputLocation::*; use stats::Stats; -use getopts::{OptGroup, optflag, optopt}; -use serialize::Encodable; +use rustc_serialize::Encodable; use std::boxed::FnBox; use term::Terminal; use term::color::{Color, RED, YELLOW, GREEN, CYAN}; @@ -311,22 +309,23 @@ impl TestOpts { /// Result of parsing the options. 
pub type OptRes = Result; -fn optgroups() -> Vec { - vec!(getopts::optflag("", "ignored", "Run ignored tests"), - getopts::optflag("", "test", "Run tests and not benchmarks"), - getopts::optflag("", "bench", "Run benchmarks instead of tests"), - getopts::optflag("h", "help", "Display this message (longer with --help)"), - getopts::optopt("", "logfile", "Write logs to the specified file instead \ - of stdout", "PATH"), - getopts::optflag("", "nocapture", "don't capture stdout/stderr of each \ - task, allow printing directly"), - getopts::optopt("", "color", "Configure coloring of output: +fn optgroups(options: &mut getopts::Options) { + options + .optflag("", "ignored", "Run ignored tests") + .optflag("", "test", "Run tests and not benchmarks") + .optflag("", "bench", "Run benchmarks instead of tests") + .optflag("h", "help", "Display this message (longer with --help)") + .optopt("", "logfile", "Write logs to the specified file instead \ + of stdout", "PATH") + .optflag("", "nocapture", "don't capture stdout/stderr of each \ + task, allow printing directly") + .optopt("", "color", "Configure coloring of output: auto = colorize if stdout is a tty and tests are run on serially (default); always = always colorize output; - never = never colorize output;", "auto|always|never")) + never = never colorize output;", "auto|always|never"); } -fn usage(binary: &str) { +fn usage(binary: &str, options: &mut getopts::Options) { let message = format!("Usage: {} [OPTIONS] [FILTER]", binary); println!(r#"{usage} @@ -354,19 +353,20 @@ Test Attributes: test, then the test runner will ignore these tests during normal test runs. Running with --ignored will run these tests."#, - usage = getopts::usage(&message, &optgroups())); + usage = options.usage(&message)); } // Parses command line arguments into test options pub fn parse_opts(args: &[String]) -> Option { let args_ = args.tail(); - let matches = - match getopts::getopts(args_, &optgroups()) { - Ok(m) => m, - Err(f) => return Some(Err(f.to_string())) - }; + let mut options = getopts::Options::new(); + optgroups(&mut options); + let matches = match options.parse(args_) { + Ok(m) => m, + Err(f) => return Some(Err(f.to_string())) + }; - if matches.opt_present("h") { usage(&args[0]); return None; } + if matches.opt_present("h") { usage(&args[0], &mut options); return None; } let filter = if !matches.free.is_empty() { Some(matches.free[0].clone()) @@ -429,7 +429,7 @@ pub enum TestResult { unsafe impl Send for TestResult {} enum OutputLocation { - Pretty(Box + Send>), + Pretty(Box + Send>), Raw(T), } diff --git a/src/test/bench/shootout-pfib.rs b/src/test/bench/shootout-pfib.rs index 2d5aae30ae8a9..43985113b21dd 100644 --- a/src/test/bench/shootout-pfib.rs +++ b/src/test/bench/shootout-pfib.rs @@ -53,17 +53,16 @@ struct Config { stress: bool } -fn parse_opts(argv: Vec ) -> Config { - let opts = vec!(getopts::optflag("", "stress", "")); +fn parse_opts(argv: Vec) -> Config { + let mut options = getopts::Options::new(); + options.optflag("", "stress", ""); let argv = argv.iter().map(|x| x.to_string()).collect::>(); let opt_args = &argv[1..argv.len()]; - match getopts::getopts(opt_args, &opts) { - Ok(ref m) => { - return Config {stress: m.opt_present("stress")} - } - Err(_) => { panic!(); } + match options.parse(opt_args) { + Ok(ref m) => Config {stress: m.opt_present("stress")}, + Err(_) => { panic!(); } } } diff --git a/src/test/compile-fail-fulldeps/unstable-crates.rs b/src/test/compile-fail-fulldeps/unstable-crates.rs new file mode 100644 index 
0000000000000..a174b1ae735d1 --- /dev/null +++ b/src/test/compile-fail-fulldeps/unstable-crates.rs @@ -0,0 +1,35 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +extern crate alloc; //~ ERROR: use of unstable library feature +extern crate arena; //~ ERROR: use of unstable library feature +extern crate collections; //~ ERROR: use of unstable library feature +extern crate core; //~ ERROR: use of unstable library feature +extern crate flate; //~ ERROR: use of unstable library feature +extern crate fmt_macros; //~ ERROR: use of unstable library feature +extern crate getopts; //~ ERROR: use of unstable library feature +extern crate graphviz; //~ ERROR: use of unstable library feature +extern crate libc; //~ ERROR: use of unstable library feature +extern crate log; //~ ERROR: use of unstable library feature +extern crate rand; //~ ERROR: use of unstable library feature +extern crate rbml; //~ ERROR: use of unstable library feature +extern crate rustc; //~ ERROR: use of unstable library feature +extern crate rustc_driver; //~ ERROR: use of unstable library feature +extern crate rustc_serialize; //~ ERROR: use of unstable library feature +extern crate rustdoc; //~ ERROR: use of unstable library feature +extern crate syntax; //~ ERROR: use of unstable library feature +extern crate term; //~ ERROR: use of unstable library feature +extern crate test; //~ ERROR: use of unstable library feature + +mod foo { + extern crate std; +} + +fn main() {} diff --git a/src/test/run-pass/derive-no-std.rs b/src/test/run-pass/derive-no-std.rs index 0234d7b0b6376..acf38184df9df 100644 --- a/src/test/run-pass/derive-no-std.rs +++ b/src/test/run-pass/derive-no-std.rs @@ -13,7 +13,7 @@ extern crate core; extern crate rand; -extern crate serialize as rustc_serialize; +extern crate rustc_serialize; extern crate collections; // Issue #16803 diff --git a/src/test/run-pass/deriving-encodable-decodable-box.rs b/src/test/run-pass/deriving-encodable-decodable-box.rs index db5a1f3f000a8..c5a9ea2b409af 100644 --- a/src/test/run-pass/deriving-encodable-decodable-box.rs +++ b/src/test/run-pass/deriving-encodable-decodable-box.rs @@ -13,12 +13,12 @@ #![feature(box_syntax)] #![feature(rustc_private)] -extern crate serialize; +extern crate rustc_serialize; -use serialize::{Encodable, Decodable}; -use serialize::json; +use rustc_serialize::{Encodable, Decodable}; +use rustc_serialize::json; -#[derive(Encodable, Decodable)] +#[derive(RustcEncodable, RustcDecodable)] struct A { foo: Box<[bool]>, } diff --git a/src/test/run-pass/deriving-encodable-decodable-cell-refcell.rs b/src/test/run-pass/deriving-encodable-decodable-cell-refcell.rs index 7cc59edfcab0e..b4979bf581aad 100644 --- a/src/test/run-pass/deriving-encodable-decodable-cell-refcell.rs +++ b/src/test/run-pass/deriving-encodable-decodable-cell-refcell.rs @@ -14,18 +14,18 @@ #![feature(rustc_private)] -extern crate serialize; +extern crate rustc_serialize; use std::cell::{Cell, RefCell}; -use serialize::{Encodable, Decodable}; -use serialize::json; +use rustc_serialize::{Encodable, Decodable}; +use rustc_serialize::json; -#[derive(Encodable, Decodable)] +#[derive(RustcEncodable, RustcDecodable)] struct A { baz: isize } -#[derive(Encodable, Decodable)] 
+#[derive(RustcEncodable, RustcDecodable)] struct B { foo: Cell, bar: RefCell, diff --git a/src/test/run-pass/deriving-global.rs b/src/test/run-pass/deriving-global.rs index 10e8ddc41f3f9..29f2ff38065b4 100644 --- a/src/test/run-pass/deriving-global.rs +++ b/src/test/run-pass/deriving-global.rs @@ -10,7 +10,7 @@ #![feature(rand, rustc_private)] -extern crate serialize; +extern crate rustc_serialize; mod submod { // if any of these are implemented without global calls for any @@ -20,21 +20,21 @@ mod submod { Hash, Clone, Debug, - Encodable, Decodable)] + RustcEncodable, RustcDecodable)] enum A { A1(usize), A2(isize) } #[derive(PartialEq, PartialOrd, Eq, Ord, Hash, Clone, Debug, - Encodable, Decodable)] + RustcEncodable, RustcDecodable)] struct B { x: usize, y: isize } #[derive(PartialEq, PartialOrd, Eq, Ord, Hash, Clone, Debug, - Encodable, Decodable)] + RustcEncodable, RustcDecodable)] struct C(usize, isize); } diff --git a/src/test/run-pass/extern-mod-syntax.rs b/src/test/run-pass/extern-mod-syntax.rs index 37404ee7e696c..8bfc8f8050b14 100644 --- a/src/test/run-pass/extern-mod-syntax.rs +++ b/src/test/run-pass/extern-mod-syntax.rs @@ -11,8 +11,8 @@ #![allow(unused_imports)] #![feature(rustc_private)] -extern crate serialize; -use serialize::json::Object; +extern crate rustc_serialize; +use rustc_serialize::json::Object; pub fn main() { println!("Hello world!"); diff --git a/src/test/run-pass/getopts_ref.rs b/src/test/run-pass/getopts_ref.rs index c9595d09e21b2..0a918fd7394da 100644 --- a/src/test/run-pass/getopts_ref.rs +++ b/src/test/run-pass/getopts_ref.rs @@ -8,20 +8,17 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. - #![feature(rustc_private)] extern crate getopts; -use getopts::{optopt, getopts}; - -pub fn main() { - let args = Vec::new(); - let opts = vec!(optopt("b", "", "something", "SMTHNG")); +fn main() { + let args = Vec::::new(); + let mut options = getopts::Options::new(); + options.optopt("b", "", "something", "SMTHNG"); - match getopts(&args, &opts) { - Ok(ref m) => - assert!(!m.opt_present("b")), + match options.parse(&args) { + Ok(ref m) => assert!(!m.opt_present("b")), Err(ref f) => panic!("{}", *f) }; diff --git a/src/test/run-pass/issue-11881.rs b/src/test/run-pass/issue-11881.rs index 9da04f7235531..4f374ed4c448a 100644 --- a/src/test/run-pass/issue-11881.rs +++ b/src/test/run-pass/issue-11881.rs @@ -12,24 +12,24 @@ #![feature(rustc_private)] extern crate rbml; -extern crate serialize; +extern crate rustc_serialize; use std::io::Cursor; use std::io::prelude::*; use std::fmt; use std::slice; -use serialize::{Encodable, Encoder}; -use serialize::json; +use rustc_serialize::{Encodable, Encoder}; +use rustc_serialize::json; use rbml::writer; -#[derive(Encodable)] +#[derive(RustcEncodable)] struct Foo { baz: bool, } -#[derive(Encodable)] +#[derive(RustcEncodable)] struct Bar { froboz: usize, } diff --git a/src/test/run-pass/issue-14021.rs b/src/test/run-pass/issue-14021.rs index 907967d115d58..4188fedad9011 100644 --- a/src/test/run-pass/issue-14021.rs +++ b/src/test/run-pass/issue-14021.rs @@ -10,19 +10,19 @@ #![feature(rustc_private)] -extern crate serialize; +extern crate rustc_serialize; -use serialize::{Encodable, Decodable}; -use serialize::json; +use rustc_serialize::{Encodable, Decodable}; +use rustc_serialize::json; -#[derive(Encodable, Decodable, PartialEq, Debug)] +#[derive(RustcEncodable, RustcDecodable, PartialEq, Debug)] struct UnitLikeStruct; pub fn main() { let obj = UnitLikeStruct; let json_str: 
String = json::encode(&obj).unwrap(); - let json_object = json::from_str(&json_str); + let json_object = json_str.parse(); let mut decoder = json::Decoder::new(json_object.unwrap()); let mut decoded_obj: UnitLikeStruct = Decodable::decode(&mut decoder).unwrap(); diff --git a/src/test/run-pass/issue-15924.rs b/src/test/run-pass/issue-15924.rs index 0c208773884d4..f61c56336aa99 100644 --- a/src/test/run-pass/issue-15924.rs +++ b/src/test/run-pass/issue-15924.rs @@ -12,11 +12,11 @@ #![feature(rustc_private)] -extern crate serialize; +extern crate rustc_serialize; use std::fmt; -use serialize::{Encoder, Encodable}; -use serialize::json; +use rustc_serialize::{Encoder, Encodable}; +use rustc_serialize::json; struct Foo { v: T, diff --git a/src/test/run-pass/issue-2804.rs b/src/test/run-pass/issue-2804.rs index a2b4e218a079b..c44004d2aceb4 100644 --- a/src/test/run-pass/issue-2804.rs +++ b/src/test/run-pass/issue-2804.rs @@ -12,10 +12,10 @@ #![feature(rustc_private)] extern crate collections; -extern crate serialize; +extern crate rustc_serialize; use std::collections::HashMap; -use serialize::json::{self, Json}; +use rustc_serialize::json::{self, Json}; use std::option; enum object { diff --git a/src/test/run-pass/issue-4016.rs b/src/test/run-pass/issue-4016.rs index bc3fa162e02bd..3eb1804b612e9 100644 --- a/src/test/run-pass/issue-4016.rs +++ b/src/test/run-pass/issue-4016.rs @@ -8,17 +8,16 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. - #![feature(rustc_private)] -extern crate serialize; +extern crate rustc_serialize; -use serialize::{json, Decodable}; +use rustc_serialize::{json, Decodable}; trait JD : Decodable {} fn exec() { - let doc = json::from_str("").unwrap(); + let doc = "".parse().unwrap(); let mut decoder = json::Decoder::new(doc); let _v: T = Decodable::decode(&mut decoder).unwrap(); panic!() diff --git a/src/test/run-pass/issue-4036.rs b/src/test/run-pass/issue-4036.rs index ae7bb8a684224..5e2760cdc685e 100644 --- a/src/test/run-pass/issue-4036.rs +++ b/src/test/run-pass/issue-4036.rs @@ -8,7 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. - // Issue #4036: Test for an issue that arose around fixing up type inference // byproducts in vtable records. @@ -16,12 +15,12 @@ #![feature(rustc_private)] -extern crate serialize; +extern crate rustc_serialize; -use serialize::{json, Decodable}; +use rustc_serialize::{json, Decodable}; pub fn main() { - let json = json::from_str("[1]").unwrap(); + let json = "[1]".parse().unwrap(); let mut decoder = json::Decoder::new(json); let _x: Vec = Decodable::decode(&mut decoder).unwrap(); }
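Every caller converted in this patch follows the same crates.io `getopts` pattern: create a `getopts::Options`, register options on it with the `opt*` builder methods (rather than collecting a `Vec<OptGroup>` with the old free functions), then call `parse` and query the returned `Matches`. A minimal sketch of that pattern, with made-up `-o`/`--verbose`/`--help` options purely for illustration:

    extern crate getopts;

    use std::env;

    fn main() {
        let args: Vec<String> = env::args().collect();

        // Register options on one `Options` builder instead of building a
        // `Vec<OptGroup>` with the removed free functions.
        let mut options = getopts::Options::new();
        options.optopt("o", "output", "write output to FILE", "FILE")
               .optflag("v", "verbose", "use verbose output")
               .optflag("h", "help", "print this help menu");

        // `Options::parse` replaces the old `getopts::getopts` free function.
        let matches = match options.parse(&args[1..]) {
            Ok(m) => m,
            Err(f) => panic!("{}", f),
        };

        if matches.opt_present("h") {
            // `Options::usage` replaces the old `getopts::usage` free function.
            print!("{}", options.usage(&format!("Usage: {} [options]", args[0])));
            return;
        }

        println!("verbose: {}", matches.opt_present("v"));
        println!("output: {:?}", matches.opt_str("o"));
    }

Because registration mutates a single `Options` value, option sets compose by passing `&mut getopts::Options` between functions, which is how `config::short_optgroups`/`config::optgroups`, rustdoc's `opts`, and libtest's `optgroups` are structured above.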