Merge remote-tracking branch 'remotes/origin/master' into serde

Erick Tryzelaar 2015-08-12 22:15:58 -07:00
commit 4e0172a9a5
7 changed files with 333 additions and 130 deletions


@@ -1,5 +1,11 @@
 language: rust
+rust:
+  - 1.0.0
+  - beta
+  - nightly
 sudo: false
+before_script:
+  - pip install 'travis-cargo<0.2' --user && export PATH=$HOME/.local/bin:$PATH
 script:
   - cargo build --verbose
   - cargo build --verbose --no-default-features
@@ -7,17 +13,18 @@ script:
   - cargo test --verbose --features serde
   - rustdoc --test README.md -L target
   - cargo doc --no-deps
-after_success: |
-  [ $TRAVIS_BRANCH = master ] &&
-  [ $TRAVIS_PULL_REQUEST = false ] &&
-  echo '<meta http-equiv=refresh content=0;url=toml/index.html>' > target/doc/index.html &&
-  pip install ghp-import --user $USER &&
-  $HOME/.local/bin/ghp-import -n target/doc &&
-  git push -qf https://${TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git gh-pages
+after_success:
+  - travis-cargo --only nightly doc-upload
+  - travis-cargo coveralls --no-sudo
 env:
   global:
-    - secure: FO8GVrtHAn5GTj4LOz2BApC3tAEsMbNzvH5UVmCIeNKPuVcKcI3oWNJC/KMCvuJZhu96J3okfRLBxBJrhxsp/YT4fS4kibhZDm6AzbCqxz6AmvHJo2d0jztoRyuLwLSkhwW8vM4VeHH+Tf4PeC56YmnpUGkccHMMidxytJzx8qI=
-    - secure: WVCzGVsthRub6ezJU15xzo+ahlUoZEwvZDeMPmjIMf1G28ObE9Y4BeUNW0j9CxCjyQ+5S0mrp1l0TESN326XTDosigabDiGnKyr5wfncnreN3PCUi3gx7NI+bRTy9B3eV318BhuCDgLgRWLWufCyPtkgAdT6cl+u6p+bEh+vyxo=
+    secure: LZMkQQJT5LqLQQ8JyakjvHNqqMPy8lm/SyC+H5cKUVI/xk7xRuti4eKY937N8uSmbff2m9ZYlG6cNwIOfk/nWn8YsqxA8Wg/xugubWzqGuqu+NQ4IZVa7INT2Fiqyk5SPCh8B5fo2x7OBJ24SCkWb2p8bEWAuW8XdZZOdmi3H2I=
 notifications:
   email:
     on_success: never
+addons:
+  apt:
+    packages:
+      - libcurl4-openssl-dev
+      - libelf-dev
+      - libdw-dev


@@ -1,7 +1,7 @@
 [package]
 name = "toml"
-version = "0.1.20"
+version = "0.1.21"
 authors = ["Alex Crichton <alex@alexcrichton.com>"]
 license = "MIT/Apache-2.0"
 readme = "README.md"


@@ -1,8 +1,9 @@
 # toml-rs
 [![Build Status](https://travis-ci.org/alexcrichton/toml-rs.svg?branch=master)](https://travis-ci.org/alexcrichton/toml-rs)
+[![Coverage Status](https://coveralls.io/repos/alexcrichton/toml-rs/badge.svg?branch=master&service=github)](https://coveralls.io/github/alexcrichton/toml-rs?branch=master)
-[Documentation](http://alexcrichton.com/toml-rs/toml/index.html)
+[Documentation](http://alexcrichton.com/toml-rs)
 A [TOML][toml] decoder and encoder for Rust. This library is currently compliant with
 the v0.4.0 version of TOML. This library will also likely continue to stay up to

examples/toml2json.rs (new file, 57 lines)

@@ -0,0 +1,57 @@
#![deny(warnings)]
extern crate toml;
extern crate rustc_serialize;
use std::fs::File;
use std::env;
use std::io;
use std::io::prelude::*;
use toml::Value;
use rustc_serialize::json::Json;
fn main() {
let mut args = env::args();
let mut input = String::new();
let filename = if args.len() > 1 {
let name = args.nth(1).unwrap();
File::open(&name).and_then(|mut f| {
f.read_to_string(&mut input)
}).unwrap();
name
} else {
io::stdin().read_to_string(&mut input).unwrap();
"<stdin>".to_string()
};
let mut parser = toml::Parser::new(&input);
let toml = match parser.parse() {
Some(toml) => toml,
None => {
for err in &parser.errors {
let (loline, locol) = parser.to_linecol(err.lo);
let (hiline, hicol) = parser.to_linecol(err.hi);
println!("{}:{}:{}-{}:{} error: {}",
filename, loline, locol, hiline, hicol, err.desc);
}
return
}
};
let json = convert(Value::Table(toml));
println!("{}", json.pretty());
}
fn convert(toml: Value) -> Json {
match toml {
Value::String(s) => Json::String(s),
Value::Integer(i) => Json::I64(i),
Value::Float(f) => Json::F64(f),
Value::Boolean(b) => Json::Boolean(b),
Value::Array(arr) => Json::Array(arr.into_iter().map(convert).collect()),
Value::Table(table) => Json::Object(table.into_iter().map(|(k, v)| {
(k, convert(v))
}).collect()),
Value::Datetime(dt) => Json::String(dt),
}
}
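The example binary above drives the crate's public parsing API before converting to JSON. A minimal sketch of that first step, assuming the 0.1.x `toml::Parser`/`toml::Value` interface shown elsewhere in this commit (the document contents and key names here are made up for illustration):

    extern crate toml;

    fn main() {
        // Parse a small document the same way examples/toml2json.rs does before
        // handing the resulting table to its `convert` function.
        let src = "name = \"toml2json\"\nthreads = 4\n";
        let table = toml::Parser::new(src).parse().expect("valid TOML");
        // `table` is a BTreeMap<String, toml::Value>; `convert` then walks it
        // recursively, mapping tables to JSON objects and arrays to JSON arrays.
        assert_eq!(table.get("threads"), Some(&toml::Value::Integer(4)));
    }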


@@ -96,15 +96,31 @@ impl rustc_serialize::Decoder for Decoder
                              -> Result<T, DecodeError>
         where F: FnMut(&mut Decoder, usize) -> Result<T, DecodeError>
     {
-        let mut first_error = None;
+        // When decoding enums, this crate takes the strategy of trying to
+        // decode the current TOML as all of the possible variants, returning
+        // success on the first one that succeeds.
+        //
+        // Note that fidelity of the errors returned here is a little nebulous,
+        // but we try to return the error that had the relevant field as the
+        // longest field. This way we hopefully match an error against what was
+        // most likely being written down without losing too much info.
+        let mut first_error = None::<DecodeError>;
         for i in 0..names.len() {
             let mut d = self.sub_decoder(self.toml.clone(), "");
             match f(&mut d, i) {
-                Ok(t) => { self.toml = d.toml; return Ok(t) }
+                Ok(t) => {
+                    self.toml = d.toml;
+                    return Ok(t)
+                }
                 Err(e) => {
-                    if first_error.is_none() {
-                        first_error = Some(e);
+                    if let Some(ref first) = first_error {
+                        let my_len = e.field.as_ref().map(|s| s.len());
+                        let first_len = first.field.as_ref().map(|s| s.len());
+                        if my_len <= first_len {
+                            continue
+                        }
                     }
+                    first_error = Some(e);
                 }
             }
         }
@@ -158,7 +174,7 @@ impl rustc_serialize::Decoder for Decoder
         let toml = match self.toml {
             Some(Value::Table(ref mut table)) => {
                 table.remove(&field)
                     .or_else(|| table.remove(&f_name.replace("_", "-")))
             },
             ref found => return Err(self.mismatch("table", found)),
         };
@@ -275,8 +291,8 @@ impl rustc_serialize::Decoder for Decoder
             Some(Value::Table(ref table)) => {
                 match table.iter().skip(idx).next() {
                     Some((key, _)) => {
-                        let val = Value::String(format!("{}", key));
-                        f(&mut self.sub_decoder(Some(val), &**key))
+                        let val = Value::String(key.to_string());
+                        f(&mut self.sub_decoder(Some(val), key))
                     }
                     None => Err(self.err(ExpectedMapKey(idx))),
                 }
@@ -291,9 +307,9 @@ impl rustc_serialize::Decoder for Decoder
         match self.toml {
             Some(Value::Table(ref table)) => {
                 match table.iter().skip(idx).next() {
-                    Some((_, value)) => {
+                    Some((key, value)) => {
                         // XXX: this shouldn't clone
-                        f(&mut self.sub_decoder(Some(value.clone()), ""))
+                        f(&mut self.sub_decoder(Some(value.clone()), key))
                     }
                     None => Err(self.err(ExpectedMapElement(idx))),
                 }
@@ -309,3 +325,44 @@ impl rustc_serialize::Decoder for Decoder
         }
     }
 }
#[cfg(test)]
mod tests {
use rustc_serialize::Decodable;
use std::collections::HashMap;
use {Parser, Decoder, Value};
#[test]
fn bad_enum_chooses_longest_error() {
#[derive(RustcDecodable)]
#[allow(dead_code)]
struct Foo {
wut: HashMap<String, Bar>,
}
#[derive(RustcDecodable)]
enum Bar {
Simple(String),
Detailed(Baz),
}
#[derive(RustcDecodable, Debug)]
struct Baz {
features: Vec<String>,
}
let s = r#"
[wut]
a = { features = "" }
"#;
let v = Parser::new(s).parse().unwrap();
let mut d = Decoder::new(Value::Table(v));
let err = match Foo::decode(&mut d) {
Ok(_) => panic!("expected error"),
Err(e) => e,
};
assert_eq!(err.field.as_ref().unwrap(), "wut.a.features");
}
}
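The variant-selection loop above (`read_enum_variant`) boils down to "keep whichever error names the deepest field". A simplified stand-alone sketch of that rule, using plain `String`s instead of `DecodeError` and a hypothetical `keep_longest` helper that is not part of the crate:

    // Simplified version of the rule used when every enum variant fails to
    // decode: prefer the error whose `field` path is longest, on the assumption
    // that it points closest to what the user actually wrote.
    fn keep_longest(best: Option<String>, next: String) -> Option<String> {
        match best {
            Some(b) => if next.len() <= b.len() { Some(b) } else { Some(next) },
            None => Some(next),
        }
    }

    fn main() {
        let mut best = None;
        for field in vec!["wut.a", "wut.a.features"] {
            best = keep_longest(best, field.to_string());
        }
        // "wut.a.features" wins, matching bad_enum_chooses_longest_error above.
        assert_eq!(best, Some("wut.a.features".to_string()));
    }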


@@ -44,7 +44,6 @@
 use std::collections::BTreeMap;
 use std::str::FromStr;
-use std::string;
 pub use parser::{Parser, ParserError};
@@ -77,7 +76,7 @@ pub enum Value {
 pub type Array = Vec<Value>;
 /// Type representing a TOML table, payload of the Value::Table variant
-pub type Table = BTreeMap<string::String, Value>;
+pub type Table = BTreeMap<String, Value>;
 impl Value {
     /// Tests whether this and another value have the same type.


@@ -5,13 +5,84 @@ use std::error::Error;
 use std::fmt;
 use std::str;
-use Table as TomlTable;
-use Value::{self, Array, Table, Float, Integer, Boolean, Datetime};
 macro_rules! try {
     ($e:expr) => (match $e { Some(s) => s, None => return None })
 }
// We redefine Value because we need to keep track of encountered table
// definitions, eg when parsing:
//
// [a]
// [a.b]
// [a]
//
// we have to error out on redefinition of [a]. This bit of data is difficult to
// track in a side table so we just have a "stripped down" AST to work with
// which has the relevant metadata fields in it.
struct TomlTable {
values: BTreeMap<String, Value>,
defined: bool,
}
impl TomlTable {
fn convert(self) -> super::Table {
self.values.into_iter().map(|(k,v)| (k, v.convert())).collect()
}
}
enum Value {
String(String),
Integer(i64),
Float(f64),
Boolean(bool),
Datetime(String),
Array(Vec<Value>),
Table(TomlTable),
}
impl Value {
fn type_str(&self) -> &'static str {
match *self {
Value::String(..) => "string",
Value::Integer(..) => "integer",
Value::Float(..) => "float",
Value::Boolean(..) => "boolean",
Value::Datetime(..) => "datetime",
Value::Array(..) => "array",
Value::Table(..) => "table",
}
}
fn same_type(&self, other: &Value) -> bool {
match (self, other) {
(&Value::String(..), &Value::String(..)) |
(&Value::Integer(..), &Value::Integer(..)) |
(&Value::Float(..), &Value::Float(..)) |
(&Value::Boolean(..), &Value::Boolean(..)) |
(&Value::Datetime(..), &Value::Datetime(..)) |
(&Value::Array(..), &Value::Array(..)) |
(&Value::Table(..), &Value::Table(..)) => true,
_ => false,
}
}
fn convert(self) -> super::Value {
match self {
Value::String(x) => super::Value::String(x),
Value::Integer(x) => super::Value::Integer(x),
Value::Float(x) => super::Value::Float(x),
Value::Boolean(x) => super::Value::Boolean(x),
Value::Datetime(x) => super::Value::Datetime(x),
Value::Array(v) =>
super::Value::Array(
v.into_iter().map(|x| x.convert()).collect()
),
Value::Table(t) => super::Value::Table(t.convert())
}
}
}
 /// Parser for converting a string to a TOML `Value` instance.
 ///
 /// This parser contains the string slice that is being parsed, and exports the
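The `defined` flag introduced above is what lets the parser distinguish tables that were only created implicitly (as parents of a deeper `[a.b]` header) from tables that were explicitly written out. A small sketch of the resulting behaviour, assuming the crate's public `Parser` API (the documents are made up for illustration):

    extern crate toml;

    fn main() {
        // `[a]` is only created implicitly by `[a.b]`, so spelling it out
        // afterwards is fine: its `defined` flag is still false at that point.
        let ok = "[a.b]\nx = 1\n[a]\ny = 2\n";
        assert!(toml::Parser::new(ok).parse().is_some());

        // Both `[a]` headers are explicit definitions, so the second one is
        // reported as a redefinition of table `a` and parse() returns None.
        let bad = "[a]\nx = 1\n[a]\ny = 2\n";
        assert!(toml::Parser::new(bad).parse().is_none());
    }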
@ -161,8 +232,8 @@ impl<'a> Parser<'a> {
/// ///
/// If an error occurs, the `errors` field of this parser can be consulted /// If an error occurs, the `errors` field of this parser can be consulted
/// to determine the cause of the parse failure. /// to determine the cause of the parse failure.
pub fn parse(&mut self) -> Option<TomlTable> { pub fn parse(&mut self) -> Option<super::Table> {
let mut ret = BTreeMap::new(); let mut ret = TomlTable { values: BTreeMap::new(), defined: false };
while self.peek(0).is_some() { while self.peek(0).is_some() {
self.ws(); self.ws();
if self.newline() { continue } if self.newline() { continue }
@ -175,9 +246,8 @@ impl<'a> Parser<'a> {
let mut keys = Vec::new(); let mut keys = Vec::new();
loop { loop {
self.ws(); self.ws();
match self.key_name() { if let Some(s) = self.key_name() {
Some(s) => keys.push(s), keys.push(s);
None => {}
} }
self.ws(); self.ws();
if self.eat(']') { if self.eat(']') {
@ -189,12 +259,16 @@ impl<'a> Parser<'a> {
if keys.len() == 0 { return None } if keys.len() == 0 { return None }
// Build the section table // Build the section table
let mut table = BTreeMap::new(); let mut table = TomlTable {
values: BTreeMap::new(),
defined: true,
};
if !self.values(&mut table) { return None } if !self.values(&mut table) { return None }
if array { if array {
self.insert_array(&mut ret, &*keys, Table(table), start) self.insert_array(&mut ret, &keys, Value::Table(table),
start)
} else { } else {
self.insert_table(&mut ret, &*keys, table, start) self.insert_table(&mut ret, &keys, table, start)
} }
} else { } else {
if !self.values(&mut ret) { return None } if !self.values(&mut ret) { return None }
@ -203,7 +277,7 @@ impl<'a> Parser<'a> {
if self.errors.len() > 0 { if self.errors.len() > 0 {
None None
} else { } else {
Some(ret) Some(ret.convert())
} }
} }
@ -214,18 +288,13 @@ impl<'a> Parser<'a> {
self.finish_string(start, false) self.finish_string(start, false)
} else { } else {
let mut ret = String::new(); let mut ret = String::new();
loop { while let Some((_, ch)) = self.cur.clone().next() {
match self.cur.clone().next() { match ch {
Some((_, ch)) => { 'a' ... 'z' |
match ch { 'A' ... 'Z' |
'a' ... 'z' | '0' ... '9' |
'A' ... 'Z' | '_' | '-' => { self.cur.next(); ret.push(ch) }
'0' ... '9' | _ => break,
'_' | '-' => { self.cur.next(); ret.push(ch) }
_ => break,
}
}
None => break
} }
} }
Some(ret) Some(ret)
@ -344,9 +413,8 @@ impl<'a> Parser<'a> {
return Some(ret) return Some(ret)
} }
Some((pos, '\\')) => { Some((pos, '\\')) => {
match escape(self, pos, multiline) { if let Some(c) = escape(self, pos, multiline) {
Some(c) => ret.push(c), ret.push(c);
None => {}
} }
} }
Some((pos, ch)) if ch < '\u{1f}' => { Some((pos, ch)) if ch < '\u{1f}' => {
@ -391,32 +459,26 @@ impl<'a> Parser<'a> {
} else { } else {
"invalid" "invalid"
}; };
match u32::from_str_radix(num, 16).ok() { if let Some(n) = u32::from_str_radix(num, 16).ok() {
Some(n) => { if let Some(c) = char::from_u32(n) {
match char::from_u32(n) { me.cur.by_ref().skip(len - 1).next();
Some(c) => { return Some(c)
me.cur.by_ref().skip(len - 1).next(); } else {
return Some(c)
}
None => {
me.errors.push(ParserError {
lo: pos + 1,
hi: pos + 5,
desc: format!("codepoint `{:x}` is \
not a valid unicode \
codepoint", n),
})
}
}
}
None => {
me.errors.push(ParserError { me.errors.push(ParserError {
lo: pos, lo: pos + 1,
hi: pos + 1, hi: pos + 5,
desc: format!("expected {} hex digits \ desc: format!("codepoint `{:x}` is \
after a `{}` escape", len, c), not a valid unicode \
codepoint", n),
}) })
} }
} else {
me.errors.push(ParserError {
lo: pos,
hi: pos + 1,
desc: format!("expected {} hex digits \
after a `{}` escape", len, c),
})
} }
None None
} }
@ -519,9 +581,9 @@ impl<'a> Parser<'a> {
}; };
let input = input.trim_left_matches('+'); let input = input.trim_left_matches('+');
if is_float { if is_float {
input.parse().ok().map(Float) input.parse().ok().map(Value::Float)
} else { } else {
input.parse().ok().map(Integer) input.parse().ok().map(Value::Integer)
} }
}; };
if ret.is_none() { if ret.is_none() {
@ -603,12 +665,12 @@ impl<'a> Parser<'a> {
for _ in 0..4 { for _ in 0..4 {
self.cur.next(); self.cur.next();
} }
Some(Boolean(true)) Some(Value::Boolean(true))
} else if rest.starts_with("false") { } else if rest.starts_with("false") {
for _ in 0..5 { for _ in 0..5 {
self.cur.next(); self.cur.next();
} }
Some(Boolean(false)) Some(Value::Boolean(false))
} else { } else {
let next = self.next_pos(); let next = self.next_pos();
self.errors.push(ParserError { self.errors.push(ParserError {
@ -659,7 +721,7 @@ impl<'a> Parser<'a> {
valid = valid && it.next().map(is_digit).unwrap_or(false); valid = valid && it.next().map(is_digit).unwrap_or(false);
valid = valid && it.next().map(|c| c == 'Z').unwrap_or(false); valid = valid && it.next().map(|c| c == 'Z').unwrap_or(false);
if valid { if valid {
Some(Datetime(date.clone())) Some(Value::Datetime(date.clone()))
} else { } else {
self.errors.push(ParserError { self.errors.push(ParserError {
lo: start, lo: start,
@ -683,7 +745,7 @@ impl<'a> Parser<'a> {
loop { loop {
// Break out early if we see the closing bracket // Break out early if we see the closing bracket
consume(self); consume(self);
if self.eat(']') { return Some(Array(ret)) } if self.eat(']') { return Some(Value::Array(ret)) }
// Attempt to parse a value, triggering an error if it's the wrong // Attempt to parse a value, triggering an error if it's the wrong
// type. // type.
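The wrong-type check mentioned in the comment above enforces the TOML v0.4.0 rule that an array's elements all share one type. A one-assertion sketch, again assuming the public `Parser` API:

    extern crate toml;

    fn main() {
        // Mixing an integer and a string in one array is a type error, so the
        // parser records it and parse() returns None.
        assert!(toml::Parser::new("a = [1, \"two\"]\n").parse().is_none());
    }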
@ -709,14 +771,14 @@ impl<'a> Parser<'a> {
} }
consume(self); consume(self);
if !self.expect(']') { return None } if !self.expect(']') { return None }
return Some(Array(ret)) return Some(Value::Array(ret))
} }
fn inline_table(&mut self, _start: usize) -> Option<Value> { fn inline_table(&mut self, _start: usize) -> Option<Value> {
if !self.expect('{') { return None } if !self.expect('{') { return None }
self.ws(); self.ws();
let mut ret = BTreeMap::new(); let mut ret = TomlTable { values: BTreeMap::new(), defined: true };
if self.eat('}') { return Some(Table(ret)) } if self.eat('}') { return Some(Value::Table(ret)) }
loop { loop {
let lo = self.next_pos(); let lo = self.next_pos();
let key = try!(self.key_name()); let key = try!(self.key_name());
@ -729,19 +791,19 @@ impl<'a> Parser<'a> {
if !self.expect(',') { return None } if !self.expect(',') { return None }
self.ws(); self.ws();
} }
return Some(Table(ret)) return Some(Value::Table(ret))
} }
fn insert(&mut self, into: &mut TomlTable, key: String, value: Value, fn insert(&mut self, into: &mut TomlTable, key: String, value: Value,
key_lo: usize) { key_lo: usize) {
if into.contains_key(&key) { if into.values.contains_key(&key) {
self.errors.push(ParserError { self.errors.push(ParserError {
lo: key_lo, lo: key_lo,
hi: key_lo + key.len(), hi: key_lo + key.len(),
desc: format!("duplicate key: `{}`", key), desc: format!("duplicate key: `{}`", key),
}) })
} else { } else {
into.insert(key, value); into.values.insert(key, value);
} }
} }
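The `contains_key` check in `insert` above is what rejects a key written twice at the same level. A matching sketch, assuming the same public API:

    extern crate toml;

    fn main() {
        // The second `a` hits the "duplicate key: `a`" branch above, so the
        // document is rejected.
        assert!(toml::Parser::new("a = 1\na = 2\n").parse().is_none());
    }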
@ -751,15 +813,12 @@ impl<'a> Parser<'a> {
for part in keys[..keys.len() - 1].iter() { for part in keys[..keys.len() - 1].iter() {
let tmp = cur; let tmp = cur;
if tmp.contains_key(part) { if tmp.values.contains_key(part) {
match *tmp.get_mut(part).unwrap() { match *tmp.values.get_mut(part).unwrap() {
Table(ref mut table) => { Value::Table(ref mut table) => cur = table,
cur = table; Value::Array(ref mut array) => {
continue
}
Array(ref mut array) => {
match array.last_mut() { match array.last_mut() {
Some(&mut Table(ref mut table)) => cur = table, Some(&mut Value::Table(ref mut table)) => cur = table,
_ => { _ => {
self.errors.push(ParserError { self.errors.push(ParserError {
lo: key_lo, lo: key_lo,
@ -770,7 +829,6 @@ impl<'a> Parser<'a> {
return None return None
} }
} }
continue
} }
_ => { _ => {
self.errors.push(ParserError { self.errors.push(ParserError {
@ -782,12 +840,16 @@ impl<'a> Parser<'a> {
return None return None
} }
} }
continue
} }
// Initialize an empty table as part of this sub-key // Initialize an empty table as part of this sub-key
tmp.insert(part.clone(), Table(BTreeMap::new())); tmp.values.insert(part.clone(), Value::Table(TomlTable {
match *tmp.get_mut(part).unwrap() { values: BTreeMap::new(),
Table(ref mut inner) => cur = inner, defined: false,
}));
match *tmp.values.get_mut(part).unwrap() {
Value::Table(ref mut inner) => cur = inner,
_ => unreachable!(), _ => unreachable!(),
} }
} }
@ -795,45 +857,38 @@ impl<'a> Parser<'a> {
} }
fn insert_table(&mut self, into: &mut TomlTable, keys: &[String], fn insert_table(&mut self, into: &mut TomlTable, keys: &[String],
value: TomlTable, key_lo: usize) { table: TomlTable, key_lo: usize) {
let (into, key) = match self.recurse(into, keys, key_lo) { let (into, key) = match self.recurse(into, keys, key_lo) {
Some(pair) => pair, Some(pair) => pair,
None => return, None => return,
}; };
let key = format!("{}", key); if !into.values.contains_key(key) {
let mut added = false; into.values.insert(key.to_owned(), Value::Table(table));
if !into.contains_key(&key) { return
into.insert(key.clone(), Table(BTreeMap::new()));
added = true;
} }
match into.get_mut(&key) { if let Value::Table(ref mut into) = *into.values.get_mut(key).unwrap() {
Some(&mut Table(ref mut table)) => { if into.defined {
let any_tables = table.values().any(|v| v.as_table().is_some());
if !any_tables && !added {
self.errors.push(ParserError {
lo: key_lo,
hi: key_lo + key.len(),
desc: format!("redefinition of table `{}`", key),
});
}
for (k, v) in value.into_iter() {
if table.insert(k.clone(), v).is_some() {
self.errors.push(ParserError {
lo: key_lo,
hi: key_lo + key.len(),
desc: format!("duplicate key `{}` in table", k),
});
}
}
}
Some(_) => {
self.errors.push(ParserError { self.errors.push(ParserError {
lo: key_lo, lo: key_lo,
hi: key_lo + key.len(), hi: key_lo + key.len(),
desc: format!("duplicate key `{}` in table", key), desc: format!("redefinition of table `{}`", key),
}); });
} }
None => {} for (k, v) in table.values {
if into.values.insert(k.clone(), v).is_some() {
self.errors.push(ParserError {
lo: key_lo,
hi: key_lo + key.len(),
desc: format!("duplicate key `{}` in table", k),
});
}
}
} else {
self.errors.push(ParserError {
lo: key_lo,
hi: key_lo + key.len(),
desc: format!("duplicate key `{}` in table", key),
});
} }
} }
@ -843,12 +898,11 @@ impl<'a> Parser<'a> {
Some(pair) => pair, Some(pair) => pair,
None => return, None => return,
}; };
let key = format!("{}", key); if !into.values.contains_key(key) {
if !into.contains_key(&key) { into.values.insert(key.to_owned(), Value::Array(Vec::new()));
into.insert(key.clone(), Array(Vec::new()));
} }
match *into.get_mut(&key).unwrap() { match *into.values.get_mut(key).unwrap() {
Array(ref mut vec) => { Value::Array(ref mut vec) => {
match vec.first() { match vec.first() {
Some(ref v) if !v.same_type(&value) => { Some(ref v) if !v.same_type(&value) => {
self.errors.push(ParserError { self.errors.push(ParserError {
@ -1333,4 +1387,32 @@ trimmed in raw strings.
c = 2 c = 2
", "duplicate key `c` in table"); ", "duplicate key `c` in table");
} }
#[test]
fn bad_table_redefine() {
bad!("
[a]
foo=\"bar\"
[a.b]
foo=\"bar\"
[a]
", "redefinition of table `a`");
bad!("
[a]
foo=\"bar\"
b = { foo = \"bar\" }
[a]
", "redefinition of table `a`");
bad!("
[a]
b = {}
[a.b]
", "redefinition of table `b`");
bad!("
[a]
b = {}
[a]
", "redefinition of table `a`");
}
} }