Removed all warnings besides two

Most warnings were removed via rustfix. Two remain: one is a long return type, and the other is a Clippy naming suggestion (`to_owned`) for MaybeString.
This commit is contained in:
parent f5e621c048
commit 8b88c96cfc
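Most of the hunks below are mechanical rustfix/Clippy cleanups and fall into three patterns: struct field init shorthand (`value: value,` becomes `value,`), `&'static str` shortened to `&str` in consts and statics, and lossless `u16::from(...)` / `u32::from(...)` conversions in place of `as` casts. A minimal, self-contained sketch of those patterns follows for reference; the type and helper names (Span, make_span, digit_value) are illustrative only and are not taken from this commit.

struct Span {
    start: usize,
    end: usize,
}

// Field init shorthand: `Span { start: start, end: end }` becomes `Span { start, end }`.
fn make_span(start: usize, end: usize) -> Span {
    Span { start, end }
}

// Lossless widening via `From` instead of an `as` cast:
// `(d - b'0') as u32` becomes `u32::from(d - b'0')`.
fn digit_value(d: u8) -> u32 {
    u32::from(d - b'0')
}

// `&str` instead of `&'static str` in a const; the 'static lifetime is implied.
const FIELD: &str = "$__toml_private_datetime";

fn main() {
    let s = make_span(3, 7);
    assert_eq!((s.start, s.end), (3, 7));
    assert_eq!(digit_value(b'7'), 7);
    assert!(!FIELD.is_empty());
}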
@@ -40,8 +40,8 @@ pub struct DatetimeParseError {
 //
 // In general the TOML encoder/decoder will catch this and not literally emit
 // these strings but rather emit datetimes as they're intended.
-pub const FIELD: &'static str = "$__toml_private_datetime";
-pub const NAME: &'static str = "$__toml_private_Datetime";
+pub const FIELD: &str = "$__toml_private_datetime";
+pub const NAME: &str = "$__toml_private_Datetime";
 
 #[derive(PartialEq, Clone)]
 struct Date {
@@ -135,10 +135,10 @@ impl FromStr for Datetime {
 offset_allowed = false;
 None
 } else {
-let y1 = digit(&mut chars)? as u16;
-let y2 = digit(&mut chars)? as u16;
-let y3 = digit(&mut chars)? as u16;
-let y4 = digit(&mut chars)? as u16;
+let y1 = u16::from(digit(&mut chars)?);
+let y2 = u16::from(digit(&mut chars)?);
+let y3 = u16::from(digit(&mut chars)?);
+let y4 = u16::from(digit(&mut chars)?);
 
 match chars.next() {
 Some('-') => {}
@@ -210,7 +210,7 @@ impl FromStr for Datetime {
 b'0'..=b'9' => {
 if i < 9 {
 let p = 10_u32.pow(8 - i as u32);
-nanosecond += p * (byte - b'0') as u32;
+nanosecond += p * u32::from(byte - b'0');
 }
 }
 _ => {
@@ -229,7 +229,7 @@ impl FromStr for Datetime {
 hour: h1 * 10 + h2,
 minute: m1 * 10 + m2,
 second: s1 * 10 + s2,
-nanosecond: nanosecond,
+nanosecond,
 };
 
 if time.hour > 24 {
@@ -292,8 +292,8 @@ impl FromStr for Datetime {
 
 Ok(Datetime {
 date: full_date,
-time: time,
-offset: offset,
+time,
+offset,
 })
 }
 }
@@ -345,7 +345,7 @@ impl<'de> de::Deserialize<'de> for Datetime {
 }
 }
 
-static FIELDS: [&'static str; 1] = [FIELD];
+static FIELDS: [&str; 1] = [FIELD];
 deserializer.deserialize_struct(NAME, &FIELDS, DatetimeVisitor)
 }
 }
src/de.rs (134)
@@ -270,9 +270,9 @@ impl<'de, 'b> de::Deserializer<'de> for &'b mut Deserializer<'de> {
 }
 E::DottedTable(_) => visitor.visit_enum(DottedTableDeserializer {
 name: name.expect("Expected table header to be passed."),
-value: value,
+value,
 }),
-e @ _ => Err(Error::from_kind(
+e => Err(Error::from_kind(
 Some(value.start),
 ErrorKind::Wanted {
 expected: "string or table",
@@ -425,7 +425,7 @@ impl<'de, 'b> de::MapAccess<'de> for MapVisitor<'de, 'b> {
 cur_parent: self.cur - 1,
 cur: 0,
 max: self.max,
-array: array,
+array,
 tables: &mut *self.tables,
 de: &mut *self.de,
 });
@@ -525,7 +525,7 @@ struct StrDeserializer<'a> {
 
 impl<'a> StrDeserializer<'a> {
 fn new(key: Cow<'a, str>) -> StrDeserializer<'a> {
-StrDeserializer { key: key }
+StrDeserializer { key }
 }
 }
 
@@ -557,7 +557,7 @@ struct ValueDeserializer<'a> {
 impl<'a> ValueDeserializer<'a> {
 fn new(value: Value<'a>) -> ValueDeserializer<'a> {
 ValueDeserializer {
-value: value,
+value,
 validate_struct_keys: false,
 }
 }
@@ -615,7 +615,7 @@ impl<'de> de::Deserializer<'de> for ValueDeserializer<'de> {
 where
 V: de::Visitor<'de>,
 {
-if name == datetime::NAME && fields == &[datetime::FIELD] {
+if name == datetime::NAME && fields == [datetime::FIELD] {
 if let E::Datetime(s) = self.value.e {
 return visitor.visit_map(DatetimeDeserializer {
 date: s,
@@ -625,8 +625,8 @@ impl<'de> de::Deserializer<'de> for ValueDeserializer<'de> {
 }
 
 if self.validate_struct_keys {
-match &self.value.e {
-&E::InlineTable(ref values) | &E::DottedTable(ref values) => {
+match self.value.e {
+E::InlineTable(ref values) | E::DottedTable(ref values) => {
 let extra_fields = values
 .iter()
 .filter_map(|key_value| {
@@ -656,7 +656,7 @@ impl<'de> de::Deserializer<'de> for ValueDeserializer<'de> {
 }
 }
 
-if name == spanned::NAME && fields == &[spanned::START, spanned::END, spanned::VALUE] {
+if name == spanned::NAME && fields == [spanned::START, spanned::END, spanned::VALUE] {
 let start = self.value.start;
 let end = self.value.end;
 
@@ -710,7 +710,7 @@ impl<'de> de::Deserializer<'de> for ValueDeserializer<'de> {
 })
 }
 }
-e @ _ => Err(Error::from_kind(
+e => Err(Error::from_kind(
 Some(self.value.start),
 ErrorKind::Wanted {
 expected: "string or inline table",
@@ -850,7 +850,7 @@ impl<'de> de::EnumAccess<'de> for DottedTableDeserializer<'de> {
 {
 let (name, value) = (self.name, self.value);
 seed.deserialize(StrDeserializer::new(name))
-.map(|val| (val, TableEnumDeserializer { value: value }))
+.map(|val| (val, TableEnumDeserializer { value }))
 }
 }
 
@@ -905,7 +905,7 @@ impl<'de> de::EnumAccess<'de> for InlineTableDeserializer<'de> {
 };
 
 seed.deserialize(StrDeserializer::new(key))
-.map(|val| (val, TableEnumDeserializer { value: value }))
+.map(|val| (val, TableEnumDeserializer { value }))
 }
 }
 
@@ -920,7 +920,7 @@ impl<'de> de::VariantAccess<'de> for TableEnumDeserializer<'de> {
 fn unit_variant(self) -> Result<(), Self::Error> {
 match self.value.e {
 E::InlineTable(values) | E::DottedTable(values) => {
-if values.len() == 0 {
+if values.is_empty() {
 Ok(())
 } else {
 Err(Error::from_kind(
@@ -929,7 +929,7 @@ impl<'de> de::VariantAccess<'de> for TableEnumDeserializer<'de> {
 ))
 }
 }
-e @ _ => Err(Error::from_kind(
+e => Err(Error::from_kind(
 Some(self.value.start),
 ErrorKind::Wanted {
 expected: "table",
@@ -993,7 +993,7 @@ impl<'de> de::VariantAccess<'de> for TableEnumDeserializer<'de> {
 ))
 }
 }
-e @ _ => Err(Error::from_kind(
+e => Err(Error::from_kind(
 Some(self.value.start),
 ErrorKind::Wanted {
 expected: "table",
@@ -1026,7 +1026,7 @@ impl<'a> Deserializer<'a> {
 pub fn new(input: &'a str) -> Deserializer<'a> {
 Deserializer {
 tokens: Tokenizer::new(input),
-input: input,
+input,
 require_newline_after_table: true,
 allow_duplciate_after_longer_table: false,
 }
@@ -1080,10 +1080,10 @@ impl<'a> Deserializer<'a> {
 tables.push(cur_table);
 }
 cur_table = Table {
-at: at,
+at,
 header: Vec::new(),
 values: Some(Vec::new()),
-array: array,
+array,
 };
 loop {
 let part = header.next().map_err(|e| self.token_error(e));
@@ -1151,7 +1151,7 @@ impl<'a> Deserializer<'a> {
 Ok(Line::Table {
 at: start,
 header: ret,
-array: array,
+array,
 })
 }
 
@@ -1175,33 +1175,33 @@ impl<'a> Deserializer<'a> {
 let value = match self.next()? {
 Some((Span { start, end }, Token::String { val, .. })) => Value {
 e: E::String(val),
-start: start,
-end: end,
+start,
+end,
 },
 Some((Span { start, end }, Token::Keylike("true"))) => Value {
 e: E::Boolean(true),
-start: start,
-end: end,
+start,
+end,
 },
 Some((Span { start, end }, Token::Keylike("false"))) => Value {
 e: E::Boolean(false),
-start: start,
-end: end,
+start,
+end,
 },
 Some((span, Token::Keylike(key))) => self.number_or_date(span, key)?,
 Some((span, Token::Plus)) => self.number_leading_plus(span)?,
 Some((Span { start, .. }, Token::LeftBrace)) => {
 self.inline_table().map(|(Span { end, .. }, table)| Value {
 e: E::InlineTable(table),
-start: start,
-end: end,
+start,
+end,
 })?
 }
 Some((Span { start, .. }, Token::LeftBracket)) => {
 self.array().map(|(Span { end, .. }, array)| Value {
 e: E::Array(array),
-start: start,
-end: end,
+start,
+end,
 })?
 }
 Some(token) => {
@@ -1226,15 +1226,15 @@ impl<'a> Deserializer<'a> {
 self.datetime(span, s, false)
 .map(|(Span { start, end }, d)| Value {
 e: E::Datetime(d),
-start: start,
-end: end,
+start,
+end,
 })
 } else if self.eat(Token::Colon)? {
 self.datetime(span, s, true)
 .map(|(Span { start, end }, d)| Value {
 e: E::Datetime(d),
-start: start,
-end: end,
+start,
+end,
 })
 } else {
 self.number(span, s)
@@ -1282,8 +1282,8 @@ impl<'a> Deserializer<'a> {
 Ok((
 Value {
 e: E::DottedTable(table.values.unwrap_or_else(Vec::new)),
-start: start,
-end: end,
+start,
+end,
 },
 Some(header.clone()),
 ))
@@ -1296,8 +1296,8 @@ impl<'a> Deserializer<'a> {
 fn number(&mut self, Span { start, end }: Span, s: &'a str) -> Result<Value<'a>, Error> {
 let to_integer = |f| Value {
 e: E::Integer(f),
-start: start,
-end: end,
+start,
+end,
 };
 if s.starts_with("0x") {
 self.integer(&s[2..], 16).map(to_integer)
@@ -1308,8 +1308,8 @@ impl<'a> Deserializer<'a> {
 } else if s.contains('e') || s.contains('E') {
 self.float(s, None).map(|f| Value {
 e: E::Float(f),
-start: start,
-end: end,
+start,
+end,
 })
 } else if self.eat(Token::Period)? {
 let at = self.tokens.current();
@@ -1317,8 +1317,8 @@ impl<'a> Deserializer<'a> {
 Some((Span { start, end }, Token::Keylike(after))) => {
 self.float(s, Some(after)).map(|f| Value {
 e: E::Float(f),
-start: start,
-end: end,
+start,
+end,
 })
 }
 _ => Err(self.error(at, ErrorKind::NumberInvalid)),
@@ -1326,26 +1326,26 @@ impl<'a> Deserializer<'a> {
 } else if s == "inf" {
 Ok(Value {
 e: E::Float(f64::INFINITY),
-start: start,
-end: end,
+start,
+end,
 })
 } else if s == "-inf" {
 Ok(Value {
 e: E::Float(f64::NEG_INFINITY),
-start: start,
-end: end,
+start,
+end,
 })
 } else if s == "nan" {
 Ok(Value {
 e: E::Float(f64::NAN),
-start: start,
-end: end,
+start,
+end,
 })
 } else if s == "-nan" {
 Ok(Value {
 e: E::Float(-f64::NAN),
-start: start,
-end: end,
+start,
+end,
 })
 } else {
 self.integer(s, 10).map(to_integer)
@@ -1355,13 +1355,7 @@ impl<'a> Deserializer<'a> {
 fn number_leading_plus(&mut self, Span { start, .. }: Span) -> Result<Value<'a>, Error> {
 let start_token = self.tokens.current();
 match self.next()? {
-Some((Span { end, .. }, Token::Keylike(s))) => self.number(
-Span {
-start: start,
-end: end,
-},
-s,
-),
+Some((Span { end, .. }, Token::Keylike(s))) => self.number(Span { start, end }, s),
 _ => Err(self.error(start_token, ErrorKind::NumberInvalid)),
 }
 }
@@ -1752,13 +1746,7 @@ impl<'a> Deserializer<'a> {
 at,
 expected,
 found,
-} => self.error(
-at,
-ErrorKind::Wanted {
-expected: expected,
-found: found,
-},
-),
+} => self.error(at, ErrorKind::Wanted { expected, found }),
 TokenError::EmptyTableKey(at) => self.error(at, ErrorKind::EmptyTableKey),
 TokenError::MultilineStringKey(at) => self.error(at, ErrorKind::MultilineStringKey),
 }
@@ -1799,7 +1787,7 @@ impl Error {
 fn from_kind(at: Option<usize>, kind: ErrorKind) -> Error {
 Error {
 inner: Box::new(ErrorInner {
-kind: kind,
+kind,
 line: None,
 col: 0,
 at,
@@ -1829,7 +1817,7 @@ impl Error {
 self.inner.key.insert(0, key.to_string());
 }
 
-fn fix_offset<F>(&mut self, f: F) -> ()
+fn fix_offset<F>(&mut self, f: F)
 where
 F: FnOnce() -> Option<usize>,
 {
@@ -1840,7 +1828,7 @@ impl Error {
 }
 }
 
-fn fix_linecol<F>(&mut self, f: F) -> ()
+fn fix_linecol<F>(&mut self, f: F)
 where
 F: FnOnce(usize) -> (usize, usize),
 {
@@ -1854,7 +1842,7 @@ impl Error {
 
 impl std::convert::From<Error> for std::io::Error {
 fn from(e: Error) -> Self {
-return std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string());
+std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string())
 }
 }
 
@@ -1995,9 +1983,9 @@ impl<'a> Header<'a> {
 fn new(tokens: Tokenizer<'a>, array: bool, require_newline_after_table: bool) -> Header<'a> {
 Header {
 first: true,
-array: array,
-tokens: tokens,
-require_newline_after_table: require_newline_after_table,
+array,
+tokens,
+require_newline_after_table,
 }
 }
 
@@ -2015,10 +2003,8 @@ impl<'a> Header<'a> {
 }
 
 self.tokens.eat_whitespace()?;
-if self.require_newline_after_table {
-if !self.tokens.eat_comment()? {
-self.tokens.eat_newline_or_eof()?;
-}
+if self.require_newline_after_table && !self.tokens.eat_comment()? {
+self.tokens.eat_newline_or_eof()?;
 }
 Ok(None)
 }
@@ -150,8 +150,8 @@ impl Map<String, Value> {
 use std::collections::btree_map::Entry as EntryImpl;
 
 match self.map.entry(key.into()) {
-EntryImpl::Vacant(vacant) => Entry::Vacant(VacantEntry { vacant: vacant }),
-EntryImpl::Occupied(occupied) => Entry::Occupied(OccupiedEntry { occupied: occupied }),
+EntryImpl::Vacant(vacant) => Entry::Vacant(VacantEntry { vacant }),
+EntryImpl::Occupied(occupied) => Entry::Occupied(OccupiedEntry { occupied }),
 }
 }
 
src/ser.rs (59)
@@ -247,7 +247,7 @@ impl<'a> Serializer<'a> {
 /// will be present in `dst`.
 pub fn new(dst: &'a mut String) -> Serializer<'a> {
 Serializer {
-dst: dst,
+dst,
 state: State::End,
 settings: Rc::new(Settings::default()),
 }
@@ -263,7 +263,7 @@ impl<'a> Serializer<'a> {
 /// have a trailing comma. See `Serializer::pretty_array`
 pub fn pretty(dst: &'a mut String) -> Serializer<'a> {
 Serializer {
-dst: dst,
+dst,
 state: State::End,
 settings: Rc::new(Settings {
 array: Some(ArraySettings::pretty()),
@@ -331,13 +331,12 @@ impl<'a> Serializer<'a> {
 /// """
 /// ```
 pub fn pretty_string_literal(&mut self, value: bool) -> &mut Self {
-let use_default =
-if let &mut Some(ref mut s) = &mut Rc::get_mut(&mut self.settings).unwrap().string {
-s.literal = value;
-false
-} else {
-true
-};
+let use_default = if let Some(ref mut s) = Rc::get_mut(&mut self.settings).unwrap().string {
+s.literal = value;
+false
+} else {
+true
+};
 
 if use_default {
 let mut string = StringSettings::pretty();
@@ -387,13 +386,12 @@ impl<'a> Serializer<'a> {
 ///
 /// See `Serializer::pretty_array` for more details.
 pub fn pretty_array_indent(&mut self, value: usize) -> &mut Self {
-let use_default =
-if let &mut Some(ref mut a) = &mut Rc::get_mut(&mut self.settings).unwrap().array {
-a.indent = value;
-false
-} else {
-true
-};
+let use_default = if let Some(ref mut a) = Rc::get_mut(&mut self.settings).unwrap().array {
+a.indent = value;
+false
+} else {
+true
+};
 
 if use_default {
 let mut array = ArraySettings::pretty();
@@ -407,13 +405,12 @@ impl<'a> Serializer<'a> {
 ///
 /// See `Serializer::pretty_array` for more details.
 pub fn pretty_array_trailing_comma(&mut self, value: bool) -> &mut Self {
-let use_default =
-if let &mut Some(ref mut a) = &mut Rc::get_mut(&mut self.settings).unwrap().array {
-a.trailing_comma = value;
-false
-} else {
-true
-};
+let use_default = if let Some(ref mut a) = Rc::get_mut(&mut self.settings).unwrap().array {
+a.trailing_comma = value;
+false
+} else {
+true
+};
 
 if use_default {
 let mut array = ArraySettings::pretty();
@@ -610,7 +607,7 @@ impl<'a> Serializer<'a> {
 (&Some(StringSettings { literal: false, .. }), Repr::Literal(_, ty)) => {
 Repr::Std(ty)
 }
-(_, r @ _) => r,
+(_, r) => r,
 }
 } else {
 Repr::Std(Type::OnelineSingle)
@@ -902,7 +899,7 @@ impl<'a, 'b> ser::Serializer for &'b mut Serializer<'a> {
 ser: self,
 first: Cell::new(true),
 type_: Cell::new(None),
-len: len,
+len,
 })
 }
 
@@ -1099,10 +1096,10 @@ impl<'a, 'b> ser::SerializeMap for SerializeTable<'a, 'b> {
 let res = value.serialize(&mut Serializer {
 dst: &mut *ser.dst,
 state: State::Table {
-key: key,
+key,
 parent: &ser.state,
-first: first,
-table_emitted: table_emitted,
+first,
+table_emitted,
 },
 settings: ser.settings.clone(),
 });
@@ -1155,10 +1152,10 @@ impl<'a, 'b> ser::SerializeStruct for SerializeTable<'a, 'b> {
 let res = value.serialize(&mut Serializer {
 dst: &mut *ser.dst,
 state: State::Table {
-key: key,
+key,
 parent: &ser.state,
-first: first,
-table_emitted: table_emitted,
+first,
+table_emitted,
 },
 settings: ser.settings.clone(),
 });
@@ -2,13 +2,13 @@ use serde::{de, ser};
 use std::fmt;
 
 #[doc(hidden)]
-pub const NAME: &'static str = "$__toml_private_Spanned";
+pub const NAME: &str = "$__toml_private_Spanned";
 #[doc(hidden)]
-pub const START: &'static str = "$__toml_private_start";
+pub const START: &str = "$__toml_private_start";
 #[doc(hidden)]
-pub const END: &'static str = "$__toml_private_end";
+pub const END: &str = "$__toml_private_end";
 #[doc(hidden)]
-pub const VALUE: &'static str = "$__toml_private_value";
+pub const VALUE: &str = "$__toml_private_value";
 
 /// A spanned value, indicating the range at which it is defined in the source.
 ///
@@ -116,17 +116,13 @@ where
 
 let value: T = visitor.next_value()?;
 
-Ok(Spanned {
-start: start,
-end: end,
-value: value,
-})
+Ok(Spanned { start, end, value })
 }
 }
 
 let visitor = SpannedVisitor(::std::marker::PhantomData);
 
-static FIELDS: [&'static str; 3] = [START, END, VALUE];
+static FIELDS: [&str; 3] = [START, END, VALUE];
 deserializer.deserialize_struct(NAME, &FIELDS, visitor)
 }
 }
@@ -84,7 +84,7 @@ enum MaybeString {
 impl<'a> Tokenizer<'a> {
 pub fn new(input: &'a str) -> Tokenizer<'a> {
 let mut t = Tokenizer {
-input: input,
+input,
 chars: CrlfFold {
 chars: input.char_indices(),
 },
@@ -266,7 +266,7 @@ impl<'a> Tokenizer<'a> {
 .clone()
 .next()
 .map(|i| i.0)
-.unwrap_or(self.input.len())
+.unwrap_or_else(|| self.input.len())
 }
 
 pub fn input(&self) -> &'a str {
@@ -349,7 +349,7 @@ impl<'a> Tokenizer<'a> {
 return Ok(String {
 src: &self.input[start..self.current()],
 val: val.into_cow(&self.input[..i]),
-multiline: multiline,
+multiline,
 });
 }
 Some((i, c)) => new_ch(self, &mut val, multiline, i, c)?,
@@ -463,10 +463,7 @@ impl<'a> Tokenizer<'a> {
 .peek_one()
 .map(|t| t.0)
 .unwrap_or_else(|| self.input.len());
-Span {
-start: start,
-end: end,
-}
+Span { start, end }
 }
 
 /// Peek one char without consuming it.
@@ -642,7 +639,7 @@ mod tests {
 err(r#""\U00""#, Error::InvalidHexEscape(5, '"'));
 err(r#""\U00"#, Error::UnterminatedString(0));
 err(r#""\uD800"#, Error::InvalidEscapeValue(2, 0xd800));
-err(r#""\UFFFFFFFF"#, Error::InvalidEscapeValue(2, 0xffffffff));
+err(r#""\UFFFFFFFF"#, Error::InvalidEscapeValue(2, 0xffff_ffff));
 }
 
 #[test]
@@ -749,7 +749,7 @@ impl ser::Serializer for Serializer {
 }
 
 fn serialize_i64(self, value: i64) -> Result<Value, crate::ser::Error> {
-Ok(Value::Integer(value.into()))
+Ok(Value::Integer(value))
 }
 
 fn serialize_u8(self, value: u8) -> Result<Value, crate::ser::Error> {