Use FromIterator<_> for TokenStream
Also remove the `conservative_impl_trait` feature flag, as this feature is now stable. Refs #121
parent 0f453a5568
commit 2d6558c769
3 changed files with 6 additions and 15 deletions
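For context: `proc_macro::TokenStream` implements both `FromIterator<TokenTree>` and `FromIterator<TokenStream>`, so a stream can be built with a plain `.collect()` instead of flattening tokens into an intermediate `Vec<TokenTree>`. Below is a minimal sketch of both impls in action; it is written against the `proc-macro2` crate (which mirrors the `proc_macro` API but is usable outside a macro expansion), and the token values are made up for illustration:

```rust
use proc_macro2::{Ident, Punct, Spacing, Span, TokenStream, TokenTree};

fn main() {
    // FromIterator<TokenTree>: collect loose tokens into a stream.
    let tokens = vec![
        TokenTree::Ident(Ident::new("foo", Span::call_site())),
        TokenTree::Punct(Punct::new(';', Spacing::Alone)),
    ];
    let stream: TokenStream = tokens.into_iter().collect();

    // FromIterator<TokenStream>: collect whole streams; the result is
    // their concatenation, with no manual flattening step.
    let combined: TokenStream = vec![stream.clone(), stream].into_iter().collect();
    println!("{}", combined); // foo ; foo ;
}
```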
```diff
@@ -1,4 +1,3 @@
-#![feature(conservative_impl_trait)]
 #![feature(plugin)]
 #![feature(proc_macro)]
```
```diff
@@ -34,15 +34,7 @@ impl Builder {
     /// Reifies the `Builder` into a raw list of statements.
     pub fn build(mut self) -> TokenStream {
         let Builder { stmts, .. } = { self.flush(); self };
-
-        // use a Group here?
-        let mut tts: Vec<TokenTree> = Vec::new();
-        for s in stmts.into_iter() {
-            let i = s.into_iter();
-            tts.extend(i);
-        }
-
-        tts.into_iter().collect()
+        stmts.into_iter().collect()
     }

     /// Pushes a statement, flushing the tail buffer in the process.
```
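The deleted loop in `build` was doing the flattening by hand. Since collecting an iterator of streams concatenates them, the two forms are equivalent; here is a sketch of that equivalence, again using `proc-macro2` (plus `quote`) as a stand-in, with hypothetical statement streams in place of `Builder::stmts`:

```rust
use proc_macro2::{TokenStream, TokenTree};
use quote::quote;

fn main() {
    // Hypothetical statement list, standing in for Builder::stmts.
    let stmts: Vec<TokenStream> = vec![quote!(let a = 1;), quote!(let b = 2;)];

    // Old approach: flatten every statement into a Vec<TokenTree> by hand.
    let mut tts: Vec<TokenTree> = Vec::new();
    for s in stmts.clone() {
        tts.extend(s);
    }
    let old: TokenStream = tts.into_iter().collect();

    // New approach: FromIterator<TokenStream> does the flattening.
    let new: TokenStream = stmts.into_iter().collect();

    assert_eq!(old.to_string(), new.to_string());
}
```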
```diff
@@ -10,6 +10,7 @@ use proc_macro::{
 };

 use proc_macro::token_stream;
+use std::iter;
 use std::mem;

 use literalext::LiteralExt;
```
```diff
@@ -302,14 +303,14 @@ impl Parser {
     }

     fn match_arms(&mut self) -> ParseResult<TokenStream> {
-        let mut arms: Vec<TokenTree> = Vec::new();
+        let mut arms = Vec::new();
         while let Some(arm) = self.match_arm()? {
-            arms.extend(arm);
+            arms.push(arm);
         }
         Ok(arms.into_iter().collect())
     }

-    fn match_arm(&mut self) -> ParseResult<Option<Vec<TokenTree>>> {
+    fn match_arm(&mut self) -> ParseResult<Option<TokenStream>> {
         let mut pat: Vec<TokenTree> = Vec::new();
         loop {
             match self.peek2() {
```
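With `match_arm` now returning `ParseResult<Option<TokenStream>>`, `match_arms` can push complete streams and let the final `.collect()` concatenate them; the `Vec<TokenTree>` annotation also disappears because the element type is inferred from the pushed value. A sketch of the pattern, with a hypothetical arm source standing in for the parser (`proc-macro2`/`quote` again):

```rust
use proc_macro2::TokenStream;
use quote::quote;

// Hypothetical stand-in for Parser::match_arm: yields up to `remaining` arms.
fn next_arm(remaining: &mut u32) -> Option<TokenStream> {
    if *remaining == 0 {
        return None;
    }
    *remaining -= 1;
    Some(quote!(_ => {},))
}

fn main() {
    let mut remaining = 2;
    // Element type inferred as TokenStream from next_arm's return type;
    // no explicit annotation needed.
    let mut arms = Vec::new();
    while let Some(arm) = next_arm(&mut remaining) {
        arms.push(arm);
    }
    let all: TokenStream = arms.into_iter().collect();
    println!("{}", all); // _ => {}, _ => {},
}
```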
```diff
@@ -364,8 +365,7 @@ impl Parser {
             },
             None => return self.error("unexpected end of @match arm"),
         };
-        pat.push(body);
-        Ok(Some(pat))
+        Ok(Some(pat.into_iter().chain(iter::once(body)).collect()))
     }

     /// Parses and renders a `@let` expression.
```
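The last hunk replaces the mutate-then-return pair with a single expression: the pattern tokens are chained with `iter::once(body)`, where `body` is a single `TokenTree` (the arm body), and the combined iterator is collected into a `TokenStream`. A sketch with made-up tokens, again written against `proc-macro2`:

```rust
use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};
use std::iter;

fn main() {
    // Hypothetical match-arm pieces: a pattern token and an empty braced body.
    let pat: Vec<TokenTree> =
        vec![TokenTree::Ident(Ident::new("Some", Span::call_site()))];
    let body = TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::new()));

    // Append the single body token without mutating `pat`, then collect
    // the chained TokenTree iterator into a TokenStream.
    let arm: TokenStream = pat.into_iter().chain(iter::once(body)).collect();
    println!("{}", arm); // prints something like: Some { }
}
```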