Commit

Merge pull request #18 from bjz/master
Update to latest rustc
netvl committed Oct 22, 2014
2 parents f86f02b + 250ead2 commit 4c398c7
Showing 2 changed files with 15 additions and 15 deletions.
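
Most of the changed line pairs below appear to be trailing-whitespace cleanup; the two substantive edits replace `Option::get_ref()` with `as_ref().unwrap()` (in the lexer tests and in `src/writer/emitter.rs`), presumably because the rustc version this commit targets dropped `get_ref()`. A minimal sketch of that call-site migration, with illustrative names that are not taken from this repository:

    // Illustrative only; `cause` and `msg` are not names from this diff.
    fn main() {
        let cause: Option<String> = Some("unexpected end of stream".to_string());

        // Old call-site shape, removed by this commit:
        //     let msg = cause.get_ref();

        // Replacement used in both files; like `get_ref()`, it panics if
        // the option is `None`.
        let msg = cause.as_ref().unwrap();
        println!("caused by: {}", msg);
    }
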
28 changes: 14 additions & 14 deletions src/reader/lexer.rs
@@ -107,7 +107,7 @@ impl Token {
     #[inline]
     pub fn contains_char_data(&self) -> bool {
         match *self {
-            Whitespace(_) | Chunk(_) | Character(_) | 
+            Whitespace(_) | Chunk(_) | Character(_) |
             TagEnd | EqualsSign | DoubleQuote | SingleQuote => true,
             _ => false
         }
@@ -166,8 +166,8 @@ type LexStep = Option<LexResult>; // TODO: make up with better name
 /// Helps to set up a dispatch table for lexing large unambigous tokens like
 /// `<![CDATA[` or `<!DOCTYPE `.
 macro_rules! dispatch_on_enum_state(
-    ($_self:ident, $s:expr, $c:expr, $is:ident, 
-    $($st:ident -> $stc:pat -> $next_st:ident ! $chunk:expr),+; 
+    ($_self:ident, $s:expr, $c:expr, $is:ident,
+    $($st:ident -> $stc:pat -> $next_st:ident ! $chunk:expr),+;
     $end_st:ident -> $end_c:pat ! $end_chunk:expr -> $e:expr) => (
         match $s {
             $(
@@ -187,7 +187,7 @@
 /// `PullLexer` is a lexer for XML documents, which implements pull API.
 ///
 /// Main method is `next_token` which accepts an `std::io::Buffer` and
-/// tries to read the next lexeme from it. 
+/// tries to read the next lexeme from it.
 ///
 /// When `skip_errors` flag is set, invalid lexemes will be returned as `Chunk`s.
 /// When it is not set, errors will be reported as `Err` objects with a string message.
@@ -241,7 +241,7 @@ impl PullLexer {
     /// this method is called, but the resulting behavior is undefined.
     ///
     /// Returns `None` when logical end of stream is encountered, that is,
-    /// after `b.read_char()` returns `None` and the current state is 
+    /// after `b.read_char()` returns `None` and the current state is
     /// is exhausted.
     pub fn next_token<B: Buffer>(&mut self, b: &mut B) -> Option<LexResult> {
         // Already reached end of buffer
@@ -269,9 +269,9 @@ impl PullLexer {
         // Handle end of stream
         self.eof_handled = true;
         match self.st {
-            TagOpened | CommentOrCDataOrDoctypeStarted | 
+            TagOpened | CommentOrCDataOrDoctypeStarted |
             CommentStarted | CDataStarted(_)| DoctypeStarted(_) |
-            CommentClosing(Second) => 
+            CommentClosing(Second) =>
                 Some(Err(self.error("Unexpected end of stream"))),
             ProcessingInstructionClosing =>
                 Some(Ok(Character('?'))),
@@ -331,7 +331,7 @@ impl PullLexer {
         self.st = st;
         Some(Ok(token))
     }
-    
+
     #[inline]
     fn move_to_with_unread(&mut self, st: State, c: char, token: Token) -> LexStep {
         self.temp_char = Some(c);
@@ -409,7 +409,7 @@ impl PullLexer {
             CD -> 'A' -> CDA ! "<![CD",
             CDA -> 'T' -> CDAT ! "<![CDA",
             CDAT -> 'A' -> CDATA ! "<![CDAT";
-            CDATA -> '[' ! "<![CDATA" -> self.move_to_with(Normal, CDataStart) 
+            CDATA -> '[' ! "<![CDATA" -> self.move_to_with(Normal, CDataStart)
         )
     }
 
@@ -511,7 +511,7 @@ mod tests {
         (for $lex:ident and $buf:ident expect row $r:expr col $c:expr, $s:expr) => ({
             let err = $lex.next_token(&mut $buf);
             assert!(err.is_some());
-            assert!(err.get_ref().is_err());
+            assert!(err.as_ref().unwrap().is_err());
             let err = err.unwrap().unwrap_err();
             assert_eq!($r as uint, err.row());
             assert_eq!($c as uint, err.col());
@@ -692,7 +692,7 @@ mod tests {
         ($data:expr -> $r:expr, $c:expr) => ({
             let (mut lex, mut buf) = make_lex_and_buf($data);
             assert_err!(for lex and buf expect row $r col $c, "Unexpected end of stream");
-            assert_none!(for lex and buf); 
+            assert_none!(for lex and buf);
         })
     )
     eof_check!("<" -> 0, 1);
@@ -716,7 +716,7 @@
 
     let (mut lex, mut buf) = make_lex_and_buf("<!x");
     lex.disable_errors();
-    assert_oks!(for lex and buf 
+    assert_oks!(for lex and buf
         Chunk("<!")
         Character('x')
     );
@@ -732,7 +732,7 @@
 
     let (mut lex, mut buf) = make_lex_and_buf("<!-\t");
     lex.disable_errors();
-    assert_oks!(for lex and buf 
+    assert_oks!(for lex and buf
         Chunk("<!-")
         Whitespace('\t')
     );
@@ -746,7 +746,7 @@
 
     let (mut lex, mut buf) = make_lex_and_buf($data);
     lex.disable_errors();
-    assert_oks!(for lex and buf 
+    assert_oks!(for lex and buf
         Chunk($chunk)
         Character($app)
     );
2 changes: 1 addition & 1 deletion src/writer/emitter.rs
@@ -25,7 +25,7 @@ impl fmt::Show for EmitterError {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         try!(write!(f, "Emitter error: {}", self.message));
         if self.cause.is_some() {
-            write!(f, "; caused by: {}", *self.cause.get_ref())
+            write!(f, "; caused by: {}", *self.cause.as_ref().unwrap())
        } else {
            Ok(())
        }
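
For context, the `is_some()` check followed by `as_ref().unwrap()` in the hunk above is usually collapsed with `if let` in present-day Rust. A hypothetical sketch under assumed types (`fmt::Display` in place of the old `fmt::Show`, and a plain `String` cause standing in for the real field type), not code from this commit:

    use std::fmt;

    // Simplified stand-in for xml-rs's EmitterError; field types are assumptions.
    struct EmitterError {
        message: String,
        cause: Option<String>,
    }

    impl fmt::Display for EmitterError {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            write!(f, "Emitter error: {}", self.message)?;
            // Borrow the optional cause only when it is present.
            if let Some(cause) = &self.cause {
                write!(f, "; caused by: {}", cause)?;
            }
            Ok(())
        }
    }

    fn main() {
        let err = EmitterError {
            message: "write failed".to_string(),
            cause: Some("broken pipe".to_string()),
        };
        // Prints: Emitter error: write failed; caused by: broken pipe
        println!("{}", err);
    }
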
