Format everything!

parent db28f4c100, commit 50a7e9ce37
20 changed files with 578 additions and 348 deletions
Changed paths: benchmarks/benches, docs/src, maud, maud_htmlescape, maud_macros/src
@@ -5,7 +5,8 @@ extern crate test;
-#[template(source = r#"
+#[template(
+    source = r#"
@@ -20,25 +21,44 @@ use askama::Template;
-</html>"#, ext="html")]
+</html>"#,
+    ext = "html"
+)]
-    teams: Vec<Entry>
+    teams: Vec<Entry>,
-    score: u16
+    score: u16,
-    let teams = vec![Entry {name: "Jiangsu", score: 43},
-                     Entry {name: "Beijing", score: 27},
-                     Entry {name: "Guangzhou", score: 22},
-                     Entry {name: "Shandong", score: 12}];
-    let hello = test::black_box(BenchTemplate{ year: "2015", teams });
+    let teams = vec![
+        Entry {
+            name: "Jiangsu",
+            score: 43,
+        },
+        Entry {
+            name: "Beijing",
+            score: 27,
+        },
+        Entry {
+            name: "Guangzhou",
+            score: 22,
+        },
+        Entry {
+            name: "Shandong",
+            score: 12,
+        },
+    ];
+    let hello = test::black_box(BenchTemplate {
+        year: "2015",
+        teams,
+    });

@@ -3,7 +3,7 @@
-use maud::{Markup, html};
+use maud::{html, Markup};
@@ -12,12 +12,12 @@ struct Entry {
-    use maud::{Markup, Render, html};
+    use maud::{html, Markup, Render};
-        Post
+        Post,
@@ -77,14 +77,28 @@ fn layout<S: AsRef<str>>(title: S, inner: Markup) -> Markup {
-        Entry { name: "Jiangsu", score: 43 },
-        Entry { name: "Beijing", score: 27 },
-        Entry { name: "Guangzhou", score: 22 },
-        Entry { name: "Shandong", score: 12 },
+        Entry {
+            name: "Jiangsu",
+            score: 43,
+        },
+        Entry {
+            name: "Beijing",
+            score: 27,
+        },
+        Entry {
+            name: "Guangzhou",
+            score: 22,
+        },
+        Entry {
+            name: "Shandong",
+            score: 12,
+        },
-        layout(format!("Homepage of {}", year), html! {
+        layout(
+            format!("Homepage of {}", year),
+            html! {
@@ -95,6 +109,7 @@ fn render_complicated_template(b: &mut test::Bencher) {
-        })
+            },
+        )

@@ -4,8 +4,8 @@
-use serde_json::value::{Map, Value as Json};
 use handlebars::{to_json, Handlebars};
+use serde_json::value::{Map, Value as Json};
@@ -30,14 +30,12 @@ fn make_data() -> Map<String, Json> {
-    for &(name, score) in
-        &[
+    for &(name, score) in &[
-    ]
-    {
+    ] {
@@ -51,7 +49,8 @@ fn make_data() -> Map<String, Json> {
-    handlebars.register_template_string("table", SOURCE.to_string())
+    handlebars
+        .register_template_string("table", SOURCE.to_string())
         .expect("Invalid template format");

@@ -15,10 +15,22 @@ struct Entry {
-        Entry { name: "Jiangsu", score: 43 },
-        Entry { name: "Beijing", score: 27 },
-        Entry { name: "Guangzhou", score: 22 },
-        Entry { name: "Shandong", score: 12 },
+        Entry {
+            name: "Jiangsu",
+            score: 43,
+        },
+        Entry {
+            name: "Beijing",
+            score: 27,
+        },
+        Entry {
+            name: "Guangzhou",
+            score: 22,
+        },
+        Entry {
+            name: "Shandong",
+            score: 12,
+        },
@@ -40,6 +52,8 @@ fn render_template(b: &mut test::Bencher) {
-        }).into_string().unwrap()
+        })
+        .into_string()
+        .unwrap()

@@ -2,8 +2,8 @@
-use liquid::ParserBuilder;
 use liquid::model::{Object, Value};
+use liquid::ParserBuilder;
@@ -30,7 +30,13 @@ fn make_team(name: &'static str, score: u16) -> Value {
-    let template = test::black_box(ParserBuilder::with_stdlib().build().unwrap().parse(SOURCE).unwrap());
+    let template = test::black_box(
+        ParserBuilder::with_stdlib()
+            .build()
+            .unwrap()
+            .parse(SOURCE)
+            .unwrap(),
+    );

@@ -15,10 +15,22 @@ struct Entry {
-        Entry { name: "Jiangsu", score: 43 },
-        Entry { name: "Beijing", score: 27 },
-        Entry { name: "Guangzhou", score: 22 },
-        Entry { name: "Shandong", score: 12 },
+        Entry {
+            name: "Jiangsu",
+            score: 43,
+        },
+        Entry {
+            name: "Beijing",
+            score: 27,
+        },
+        Entry {
+            name: "Guangzhou",
+            score: 22,
+        },
+        Entry {
+            name: "Shandong",
+            score: 12,
+        },

@@ -34,12 +34,27 @@ fn render_template(b: &mut test::Bencher) {
-        context.insert("teams", &[
-            Entry { name: "Jiangsu", score: 43 },
-            Entry { name: "Beijing", score: 27 },
-            Entry { name: "Guangzhou", score: 22 },
-            Entry { name: "Shandong", score: 12 },
-        ]);
+        context.insert(
+            "teams",
+            &[
+                Entry {
+                    name: "Jiangsu",
+                    score: 43,
+                },
+                Entry {
+                    name: "Beijing",
+                    score: 27,
+                },
+                Entry {
+                    name: "Guangzhou",
+                    score: 22,
+                },
+                Entry {
+                    name: "Shandong",
+                    score: 12,
+                },
+            ],
+        );

@@ -1,18 +1,18 @@
 #![feature(proc_macro_hygiene)]

-use comrak::{self, Arena, ComrakOptions};
 use comrak::nodes::{AstNode, NodeCodeBlock, NodeHeading, NodeHtmlBlock, NodeLink, NodeValue};
+use comrak::{self, Arena, ComrakOptions};
 use serde_json;
-use std::error::Error;
 use std::env;
+use std::error::Error;
 use std::fs::{self, File};
 use std::io::{self, BufReader};
 use std::mem;
 use std::path::Path;
 use std::string::FromUtf8Error;
-use syntect::parsing::SyntaxSet;
 use syntect::highlighting::{Color, ThemeSet};
 use syntect::html::highlighted_html_for_string;
+use syntect::parsing::SyntaxSet;
@@ -24,12 +24,15 @@ mod views;
-        let entries = args[3..].iter().map(|arg| {
+        let entries = args[3..]
+            .iter()
+            .map(|arg| {
-        }).collect::<Vec<_>>();
+            })
+            .collect::<Vec<_>>();
@@ -42,10 +45,13 @@ fn build_nav(entries: &[(&str, &str)], nav_path: &str) -> Result<(), Box<dyn Err
-    let nav = entries.iter().map(|&(slug, input_path)| {
+    let nav = entries
+        .iter()
+        .map(|&(slug, input_path)| {
-    }).collect::<io::Result<Vec<_>>>()?;
+        })
+        .collect::<io::Result<Vec<_>>>()?;
@@ -73,12 +79,15 @@ fn build_page(
-    let nav = nav.iter().filter_map(|(slug, title)| {
+    let nav = nav
+        .iter()
+        .filter_map(|(slug, title)| {
-    }).collect::<Vec<_>>();
+        })
+        .collect::<Vec<_>>();
@@ -99,8 +108,7 @@ fn load_page<'a>(
-    highlight_code(page.content)
-        .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
+    highlight_code(page.content).map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?;
@@ -113,11 +121,7 @@ fn load_page_title<'a>(
-        comrak::format_commonmark(
-            title,
-            options,
-            &mut StringWriter(&mut buffer),
-        ).unwrap();
+        comrak::format_commonmark(title, options, &mut StringWriter(&mut buffer)).unwrap();
@@ -131,9 +135,7 @@ fn load_page_raw<'a>(
-    let title = content
-        .first_child()
-        .filter(|node| {
+    let title = content.first_child().filter(|node| {
@@ -175,12 +177,18 @@ fn highlight_code<'a>(root: &'a AstNode<'a>) -> Result<(), FromUtf8Error> {
-    theme.settings.background = Some(Color { r: 0xff, g: 0xee, b: 0xff, a: 0xff });
+    theme.settings.background = Some(Color {
+        r: 0xff,
+        g: 0xee,
+        b: 0xff,
+        a: 0xff,
+    });
-            let syntax = ss.find_syntax_by_token(&info)
+            let syntax = ss
+                .find_syntax_by_token(&info)
                 .unwrap_or_else(|| ss.find_syntax_plain_text());

@@ -4,4 +4,3 @@ pub struct Page<'a> {
     pub title: Option<&'a AstNode<'a>>,
     pub content: &'a AstNode<'a>,
 }
-

@@ -1,8 +1,8 @@
-use comrak::{self, ComrakOptions};
-use comrak::nodes::AstNode;
-use crate::Page;
 use crate::string_writer::StringWriter;
-use maud::{DOCTYPE, Markup, PreEscaped, Render, html};
+use crate::Page;
+use comrak::nodes::AstNode;
+use comrak::{self, ComrakOptions};
+use maud::{html, Markup, PreEscaped, Render, DOCTYPE};
 use std::str;
@@ -22,7 +22,12 @@ impl<'a> Render for ComrakRemovePTags<'a> {
-        PreEscaped(buffer.trim_start_matches("<p>").trim_end_matches("</p>\n").to_string())
+        PreEscaped(
+            buffer
+                .trim_start_matches("<p>")
+                .trim_end_matches("</p>\n")
+                .to_string(),
+        )

@@ -9,9 +9,12 @@
-#[cfg(feature = "actix-web")] extern crate actix_web_dep;
-#[cfg(feature = "iron")] extern crate iron;
-#[cfg(feature = "rocket")] extern crate rocket;
+#[cfg(feature = "actix-web")]
+extern crate actix_web_dep;
+#[cfg(feature = "iron")]
+extern crate iron;
+#[cfg(feature = "rocket")]
+extern crate rocket;
@@ -161,12 +164,12 @@ pub const DOCTYPE: PreEscaped<&'static str> = PreEscaped("<!DOCTYPE html>");
 mod iron_support {
-    use std::io;
+    use crate::PreEscaped;
     use iron::headers::ContentType;
     use iron::modifier::{Modifier, Set};
     use iron::modifiers::Header;
     use iron::response::{Response, WriteBody};
-    use crate::PreEscaped;
+    use std::io;
@@ -185,11 +188,11 @@ mod iron_support {
 mod rocket_support {
+    use crate::PreEscaped;
     use rocket::http::{ContentType, Status};
     use rocket::request::Request;
     use rocket::response::{Responder, Response};
     use std::io::Cursor;
-    use crate::PreEscaped;
@@ -204,7 +207,7 @@ mod rocket_support {
-    use actix_web_dep::{Responder, HttpResponse, HttpRequest, Error};
+    use actix_web_dep::{Error, HttpRequest, HttpResponse, Responder};

@@ -1,5 +1,4 @@
 #![feature(proc_macro_hygiene)]
-
 // Make sure `std` is available but the prelude isn't
 #![no_std]
 extern crate std;

@@ -1,6 +1,6 @@
-use maud::{Markup, html};
+use maud::{html, Markup};
@@ -22,7 +22,8 @@ fn semicolons() {
-    }.into_string();
+    }
+    .into_string();
@@ -34,7 +35,8 @@ fn blocks() {
-    }.into_string();
+    }
+    .into_string();
@@ -63,10 +65,15 @@ fn simple_attributes() {
-    }.into_string();
-    assert_eq!(s, concat!(
+    }
+    .into_string();
+    assert_eq!(
+        s,
+        concat!(
-        r#"<section id="midriff"><p class="hotpink">Hello!</p></section>"#));
+            r#"<section id="midriff"><p class="hotpink">Hello!</p></section>"#
+        )
+    );
@@ -83,17 +90,24 @@ fn toggle_empty_attributes() {
-    }).into_string();
-    assert_eq!(s, concat!(
+    })
+    .into_string();
+    assert_eq!(
+        s,
+        concat!(
-        r#"<input>"#));
+            r#"<input>"#
+        )
+    );
-    struct Maud { rocks: bool }
+    struct Maud {
+        rocks: bool,
+    }
@@ -101,10 +115,14 @@ fn toggle_empty_attributes_braces() {
-    assert_eq!(s, concat!(
+    assert_eq!(
+        s,
+        concat!(
-        r#"</pon-pon:controls-alpha>"#));
+            r#"</pon-pon:controls-alpha>"#
+        )
+    );
@@ -155,15 +173,29 @@ fn toggle_classes() {
-    assert_eq!(test(true, true).into_string(), r#"<p class="cupcake muffin">Testing!</p>"#);
-    assert_eq!(test(false, true).into_string(), r#"<p class=" muffin">Testing!</p>"#);
-    assert_eq!(test(true, false).into_string(), r#"<p class="cupcake">Testing!</p>"#);
-    assert_eq!(test(false, false).into_string(), r#"<p class="">Testing!</p>"#);
+    assert_eq!(
+        test(true, true).into_string(),
+        r#"<p class="cupcake muffin">Testing!</p>"#
+    );
+    assert_eq!(
+        test(false, true).into_string(),
+        r#"<p class=" muffin">Testing!</p>"#
+    );
+    assert_eq!(
+        test(true, false).into_string(),
+        r#"<p class="cupcake">Testing!</p>"#
+    );
+    assert_eq!(
+        test(false, false).into_string(),
+        r#"<p class="">Testing!</p>"#
+    );
-    struct Maud { rocks: bool }
+    struct Maud {
+        rocks: bool,
+    }
@@ -181,8 +213,14 @@ fn mixed_classes() {
-    assert_eq!(test(true).into_string(), r#"<p class="cupcake lamington muffin">Testing!</p>"#);
-    assert_eq!(test(false).into_string(), r#"<p class="cupcake lamington">Testing!</p>"#);
+    assert_eq!(
+        test(true).into_string(),
+        r#"<p class="cupcake lamington muffin">Testing!</p>"#
+    );
+    assert_eq!(
+        test(false).into_string(),
+        r#"<p class="cupcake lamington">Testing!</p>"#
+    );
@@ -200,7 +238,10 @@ fn id_string() {
-    assert_eq!(s, r#"<p>Hi, <span class="name here" id="thing" lang="en">Lyra</span>!</p>"#);
+    assert_eq!(
+        s,
+        r#"<p>Hi, <span class="name here" id="thing" lang="en">Lyra</span>!</p>"#
+    );
@@ -218,11 +259,17 @@ fn div_shorthand_id() {
-    assert_eq!(s, r#"<div class="awesome-class" id="unique-id" contenteditable dir="rtl"></div>"#);
+    assert_eq!(
+        s,
+        r#"<div class="awesome-class" id="unique-id" contenteditable dir="rtl"></div>"#
+    );
-    assert_eq!(s, r#"<div class="awesome-class" id="unique-id" contenteditable dir="rtl"></div>"#);
+    assert_eq!(
+        s,
+        r#"<div class="awesome-class" id="unique-id" contenteditable dir="rtl"></div>"#
+    );

@@ -15,7 +15,8 @@ fn if_expr() {
-        }.into_string();
+        }
+        .into_string();
@@ -25,13 +26,13 @@ fn if_expr_in_class() {
-    ]
-    {
+    ] {
-        }.into_string();
+        }
+        .into_string();
@@ -45,7 +46,8 @@ fn if_let() {
-        }.into_string();
+        }
+        .into_string();
@@ -59,7 +61,8 @@ fn while_expr() {
-    }.into_string();
+    }
+    .into_string();
@@ -72,7 +75,8 @@ fn while_let_expr() {
-    }.into_string();
+    }
+    .into_string();
@@ -85,13 +89,18 @@ fn for_expr() {
-    }.into_string();
-    assert_eq!(s, concat!(
+    }
+    .into_string();
+    assert_eq!(
+        s,
+        concat!(
-        "</ul>"));
+            "</ul>"
+        )
+    );
@@ -106,7 +115,8 @@ fn match_expr() {
-        }.into_string();
+        }
+        .into_string();
@@ -119,7 +129,8 @@ fn match_expr_without_delims() {
-        }.into_string();
+        }
+        .into_string();
@@ -132,7 +143,8 @@ fn match_no_trailing_comma() {
-        }.into_string();
+        }
+        .into_string();
@@ -146,21 +158,27 @@ fn match_expr_with_guards() {
-        }.into_string();
+        }
+        .into_string();
-    for &(input, output) in &[(1, "<span class=\"one\">1</span>"), (2, "<span class=\"two\">2</span>"), (3, "<span class=\"many\">3</span>")] {
+    for &(input, output) in &[
+        (1, "<span class=\"one\">1</span>"),
+        (2, "<span class=\"two\">2</span>"),
+        (3, "<span class=\"many\">3</span>"),
+    ] {
-        }.into_string();
+        }
+        .into_string();
@@ -170,7 +188,8 @@ fn let_expr() {
-    }.into_string();
+    }
+    .into_string();
@@ -183,10 +202,12 @@ fn let_lexical_scope() {
-    }.into_string();
-    assert_eq!(s, concat!(
-        "Twilight thought I had 99 cupcakes, ",
-        "but I only had 42."));
+    }
+    .into_string();
+    assert_eq!(
+        s,
+        concat!("Twilight thought I had 99 cupcakes, ", "but I only had 42.")
+    );
@@ -194,6 +215,7 @@ fn let_type_ascription() {
-    }.into_string();
+    }
+    .into_string();

@@ -25,7 +25,8 @@ fn blocks() {
-    }).into_string();
+    })
+    .into_string();
@@ -95,7 +96,8 @@ fn structs() {
-    }).into_string();
+    })
+    .into_string();

@@ -56,8 +56,8 @@ impl<'a> fmt::Write for Escaper<'a> {
 mod test {
-    use std::fmt::Write;
     use crate::Escaper;
+    use std::fmt::Write;

@@ -42,19 +42,20 @@ impl Markup {
-            Markup::Element { ref name, ref body, .. } => {
+            Markup::Element {
+                ref name, ref body, ..
+            } => {
                 let name_span = span_tokens(name.clone());
                 name_span.join_range(body.span())
-            },
-            Markup::Let { at_span, ref tokens } => {
-                at_span.join_range(span_tokens(tokens.clone()))
-            },
-            Markup::Special { ref segments } => {
-                join_ranges(segments.iter().map(Special::span))
-            },
-            Markup::Match { at_span, arms_span, .. } => {
-                at_span.join_range(arms_span)
-            },
+            }
+            Markup::Let {
+                at_span,
+                ref tokens,
+            } => at_span.join_range(span_tokens(tokens.clone())),
+            Markup::Special { ref segments } => join_ranges(segments.iter().map(Special::span)),
+            Markup::Match {
+                at_span, arms_span, ..
+            } => at_span.join_range(arms_span),
@@ -80,7 +81,11 @@ pub enum Attr {
-            Attr::Class { dot_span, ref name, ref toggler } => {
+            Attr::Class {
+                dot_span,
+                ref name,
+                ref toggler,
+            } => {
@@ -88,11 +93,14 @@ impl Attr {
-            },
-            Attr::Id { hash_span, ref name } => {
+            }
+            Attr::Id {
+                hash_span,
+                ref name,
+            } => {
                 let name_span = name.span();
                 hash_span.join_range(name_span)
-            },
+            }
@@ -158,12 +166,8 @@ impl Attribute {
 pub enum AttrType {
-    Normal {
-        value: Markup,
-    },
-    Empty {
-        toggler: Option<Toggler>,
-    },
+    Normal { value: Markup },
+    Empty { toggler: Option<Toggler> },
 }

@@ -1,15 +1,7 @@
 use maud_htmlescape::Escaper;
-use proc_macro2::{
-    Delimiter,
-    Group,
-    Literal,
-    Span,
-    Ident,
-    TokenStream,
-    TokenTree,
-};
-use quote::quote;
+use proc_macro2::{Delimiter, Group, Ident, Literal, Span, TokenStream, TokenTree};
 use proc_macro_error::SpanRange;
+use quote::quote;
@@ -40,13 +32,22 @@ impl Generator {
-            Markup::Block(Block { markups, outer_span }) => {
-                if markups.iter().any(|markup| matches!(*markup, Markup::Let { .. })) {
-                    build.push_tokens(self.block(Block { markups, outer_span }));
+            Markup::Block(Block {
+                markups,
+                outer_span,
+            }) => {
+                if markups
+                    .iter()
+                    .any(|markup| matches!(*markup, Markup::Let { .. }))
+                {
+                    build.push_tokens(self.block(Block {
+                        markups,
+                        outer_span,
+                    }));
                 } else {
                     self.markups(markups, build);
                 }
-            },
+            }
@@ -56,22 +57,30 @@ impl Generator {
-            },
-            Markup::Match { head, arms, arms_span, .. } => {
+            }
+            Markup::Match {
+                head,
+                arms,
+                arms_span,
+                ..
+            } => {
                 build.push_tokens({
-                    let body = arms
-                        .into_iter()
-                        .map(|arm| self.match_arm(arm))
-                        .collect();
+                    let body = arms.into_iter().map(|arm| self.match_arm(arm)).collect();
                     let mut body = TokenTree::Group(Group::new(Delimiter::Brace, body));
                     body.set_span(arms_span.collapse());
                     quote!(#head #body)
                 });
-            },
+            }
-    fn block(&self, Block { markups, outer_span }: Block) -> TokenStream {
+    fn block(
+        &self,
+        Block {
+            markups,
+            outer_span,
+        }: Block,
+    ) -> TokenStream {
@@ -93,13 +102,7 @@ impl Generator {
-    fn element(
-        &self,
-        name: TokenStream,
-        attrs: Attrs,
-        body: ElementBody,
-        build: &mut Builder,
-    ) {
+    fn element(&self, name: TokenStream, attrs: Attrs, body: ElementBody, build: &mut Builder) {
@@ -113,7 +116,10 @@ impl Generator {
-        let string = name.into_iter().map(|token| token.to_string()).collect::<String>();
+        let string = name
+            .into_iter()
+            .map(|token| token.to_string())
+            .collect::<String>();
@@ -126,12 +132,14 @@ impl Generator {
-            },
+            }
             AttrType::Empty { toggler: None } => {
                 build.push_str(" ");
                 self.name(name, build);
-            },
-            AttrType::Empty { toggler: Some(toggler) } => {
+            }
+            AttrType::Empty {
+                toggler: Some(toggler),
+            } => {
@@ -140,7 +148,7 @@ impl Generator {
-            },
+            }
@@ -171,7 +179,7 @@ fn desugar_attrs(attrs: Attrs) -> Vec<Attribute> {
-            },
+            }
@@ -201,7 +209,11 @@ fn desugar_classes_or_ids(
-            segments: vec![Special { at_span: SpanRange::call_site(), head, body }],
+            segments: vec![Special {
+                at_span: SpanRange::call_site(),
+                head,
+                body,
+            }],
@@ -228,7 +240,12 @@ fn prepend_leading_space(name: Markup, leading_space: &mut bool) -> Vec<Markup>
-fn desugar_toggler(Toggler { mut cond, cond_span }: Toggler) -> TokenStream {
+fn desugar_toggler(
+    Toggler {
+        mut cond,
+        cond_span,
+    }: Toggler,
+) -> TokenStream {

@@ -1,7 +1,5 @@
 #![feature(proc_macro_hygiene)]
-
 #![doc(html_root_url = "https://docs.rs/maud_macros/0.22.0")]
-
 // TokenStream values are reference counted, and the mental overhead of tracking
 // lifetimes outweighs the marginal gains from explicit borrowing
 #![allow(clippy::needless_pass_by_value)]
@@ -13,8 +11,8 @@ mod generate;
 use proc_macro2::{Ident, TokenStream, TokenTree};
+use proc_macro_error::proc_macro_error;
 use quote::quote;
-use proc_macro_error::{proc_macro_error};
@@ -32,7 +30,10 @@ pub fn html_debug(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
-    let output_ident = TokenTree::Ident(Ident::new("__maud_output", proc_macro::Span::mixed_site().into()));
+    let output_ident = TokenTree::Ident(Ident::new(
+        "__maud_output",
+        proc_macro::Span::mixed_site().into(),
+    ));

@@ -1,17 +1,9 @@
-use proc_macro2::{
-    Delimiter,
-    Ident,
-    Literal,
-    Spacing,
-    Span,
-    TokenStream,
-    TokenTree,
-};
+use proc_macro2::{Delimiter, Ident, Literal, Spacing, Span, TokenStream, TokenTree};
 use proc_macro_error::{abort, abort_call_site, SpanRange};
 use std::collections::HashMap;
 use std::mem;

-use syn::{LitStr, parse_str};
+use syn::{parse_str, LitStr};
@@ -83,14 +75,13 @@ impl Parser {
-                Some((
-                    TokenTree::Punct(ref punct),
-                    Some(TokenTree::Ident(ref ident)),
-                )) if punct.as_char() == '@' && *ident == "let" => {
+                Some((TokenTree::Punct(ref punct), Some(TokenTree::Ident(ref ident))))
+                    if punct.as_char() == '@' && *ident == "let" =>
+                {
                     self.advance2();
                     let keyword = TokenTree::Ident(ident.clone());
                     result.push(self.let_expr(punct.span(), keyword));
-                },
+                }
@@ -103,14 +94,14 @@ impl Parser {
-                abort_call_site!("unexpected end of input");
-            },
+                abort_call_site!("unexpected end of input");
+            }
-                self.literal(&lit)
-            },
+                self.literal(&lit)
+            }
@@ -123,25 +114,31 @@ impl Parser {
-                            ast::Markup::Special { segments }
-                        },
+                            ast::Markup::Special { segments }
+                        }
-                            let span = SpanRange { first: at_span, last: ident.span() };
+                            let span = SpanRange {
+                                first: at_span,
+                                last: ident.span(),
+                            };
                             abort!(span, "`@let` only works inside a block");
-                        },
+                        }
                         other => {
-                            let span = SpanRange { first: at_span, last: ident.span() };
+                            let span = SpanRange {
+                                first: at_span,
+                                last: ident.span(),
+                            };
                             abort!(span, "unknown keyword `@{}`", other);
                         }
                     }
-                },
+                }
                 _ => {
                     abort!(at_span, "expected keyword after `@`");
-                },
                 }
-            },
+            }
@@ -161,26 +158,29 @@ impl Parser {
-                self.element(name)
-            },
+                self.element(name)
+            }
-                self.element(name.into())
-            },
+                self.element(name.into())
+            }
-                ast::Markup::Splice { expr: group.stream(), outer_span: SpanRange::single_span(group.span()) }
+                ast::Markup::Splice {
+                    expr: group.stream(),
+                    outer_span: SpanRange::single_span(group.span()),
+                }
-                ast::Markup::Block(self.block(group.stream(), SpanRange::single_span(group.span())))
-            },
+                ast::Markup::Block(self.block(group.stream(), SpanRange::single_span(group.span())))
+            }
-                abort!(token, "invalid syntax");
-            },
+                abort!(token, "invalid syntax");
+            }
@@ -199,24 +199,19 @@ impl Parser {
-    fn if_expr(
-        &mut self,
-        at_span: Span,
-        prefix: Vec<TokenTree>,
-        segments: &mut Vec<ast::Special>,
-    ) {
+    fn if_expr(&mut self, at_span: Span, prefix: Vec<TokenTree>, segments: &mut Vec<ast::Special>) {
-                    break self.block(block.stream(), SpanRange::single_span(block.span()));
-                },
+                    break self.block(block.stream(), SpanRange::single_span(block.span()));
+                }
-                    abort!(span, "expected body for this `@if`");
-                },
+                    abort!(span, "expected body for this `@if`");
+                }
@@ -232,10 +227,9 @@ impl Parser {
-            Some((
-                TokenTree::Punct(ref punct),
-                Some(TokenTree::Ident(ref else_keyword)),
-            )) if punct.as_char() == '@' && *else_keyword == "else" => {
+            Some((TokenTree::Punct(ref punct), Some(TokenTree::Ident(ref else_keyword))))
+                if punct.as_char() == '@' && *else_keyword == "else" =>
+            {
@@ -245,28 +239,32 @@ impl Parser {
-                        self.if_expr(at_span, vec![else_keyword, if_keyword], segments)
-                    },
+                        self.if_expr(at_span, vec![else_keyword, if_keyword], segments)
+                    }
                     // Just an `@else`
-                    _ => {
-                        match self.next() {
-                            Some(TokenTree::Group(ref group)) if group.delimiter() == Delimiter::Brace => {
-                                let body = self.block(group.stream(), SpanRange::single_span(group.span()));
+                    _ => match self.next() {
+                        Some(TokenTree::Group(ref group))
+                            if group.delimiter() == Delimiter::Brace =>
+                        {
+                            let body =
+                                self.block(group.stream(), SpanRange::single_span(group.span()));
-                            },
+                        }
-                                let span = SpanRange { first: at_span, last: else_keyword.span() };
+                                let span = SpanRange {
+                                    first: at_span,
+                                    last: else_keyword.span(),
+                                };
                                 abort!(span, "expected body for this `@else`");
-                            },
-                        }
-                    },
+                            }
+                        }
+                    },
+                }
-            },
+            }
             // We didn't find an `@else`; stop
-            _ => {},
+            _ => {}
@@ -280,16 +278,23 @@ impl Parser {
match self.next() {
|
match self.next() {
|
||||||
Some(TokenTree::Group(ref block)) if block.delimiter() == Delimiter::Brace => {
|
Some(TokenTree::Group(ref block)) if block.delimiter() == Delimiter::Brace => {
|
||||||
break self.block(block.stream(), SpanRange::single_span(block.span()));
|
break self.block(block.stream(), SpanRange::single_span(block.span()));
|
||||||
},
|
}
|
||||||
Some(token) => head.push(token),
|
Some(token) => head.push(token),
|
||||||
None => {
|
None => {
|
||||||
let span = SpanRange { first: at_span, last: keyword_span };
|
let span = SpanRange {
|
||||||
|
first: at_span,
|
||||||
|
last: keyword_span,
|
||||||
|
};
|
||||||
abort!(span, "expected body for this `@while`");
|
abort!(span, "expected body for this `@while`");
|
||||||
},
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
ast::Markup::Special {
|
ast::Markup::Special {
|
||||||
segments: vec![ast::Special { at_span: SpanRange::single_span(at_span), head: head.into_iter().collect(), body }],
|
segments: vec![ast::Special {
|
||||||
|
at_span: SpanRange::single_span(at_span),
|
||||||
|
head: head.into_iter().collect(),
|
||||||
|
body,
|
||||||
|
}],
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -304,28 +309,38 @@ impl Parser {
|
||||||
Some(TokenTree::Ident(ref in_keyword)) if *in_keyword == "in" => {
|
Some(TokenTree::Ident(ref in_keyword)) if *in_keyword == "in" => {
|
||||||
head.push(TokenTree::Ident(in_keyword.clone()));
|
head.push(TokenTree::Ident(in_keyword.clone()));
|
||||||
break;
|
break;
|
||||||
},
|
}
|
||||||
Some(token) => head.push(token),
|
Some(token) => head.push(token),
|
||||||
None => {
|
None => {
|
||||||
let span = SpanRange { first: at_span, last: keyword_span };
|
let span = SpanRange {
|
||||||
|
first: at_span,
|
||||||
|
last: keyword_span,
|
||||||
|
};
|
||||||
abort!(span, "missing `in` in `@for` loop");
|
abort!(span, "missing `in` in `@for` loop");
|
||||||
},
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
let body = loop {
|
let body = loop {
|
||||||
match self.next() {
|
match self.next() {
|
||||||
Some(TokenTree::Group(ref block)) if block.delimiter() == Delimiter::Brace => {
|
Some(TokenTree::Group(ref block)) if block.delimiter() == Delimiter::Brace => {
|
||||||
break self.block(block.stream(), SpanRange::single_span(block.span()));
|
break self.block(block.stream(), SpanRange::single_span(block.span()));
|
||||||
},
|
}
|
||||||
Some(token) => head.push(token),
|
Some(token) => head.push(token),
|
||||||
None => {
|
None => {
|
||||||
let span = SpanRange { first: at_span, last: keyword_span };
|
let span = SpanRange {
|
||||||
|
first: at_span,
|
||||||
|
last: keyword_span,
|
||||||
|
};
|
||||||
abort!(span, "expected body for this `@for`");
|
abort!(span, "expected body for this `@for`");
|
||||||
},
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
ast::Markup::Special {
|
ast::Markup::Special {
|
||||||
segments: vec![ast::Special { at_span: SpanRange::single_span(at_span), head: head.into_iter().collect(), body }],
|
segments: vec![ast::Special {
|
||||||
|
at_span: SpanRange::single_span(at_span),
|
||||||
|
head: head.into_iter().collect(),
|
||||||
|
body,
|
||||||
|
}],
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -340,15 +355,23 @@ impl Parser {
|
||||||
Some(TokenTree::Group(ref body)) if body.delimiter() == Delimiter::Brace => {
|
Some(TokenTree::Group(ref body)) if body.delimiter() == Delimiter::Brace => {
|
||||||
let span = SpanRange::single_span(body.span());
|
let span = SpanRange::single_span(body.span());
|
||||||
break (self.with_input(body.stream()).match_arms(), span);
|
break (self.with_input(body.stream()).match_arms(), span);
|
||||||
},
|
}
|
||||||
Some(token) => head.push(token),
|
Some(token) => head.push(token),
|
||||||
None => {
|
None => {
|
||||||
let span = SpanRange { first: at_span, last: keyword_span };
|
let span = SpanRange {
|
||||||
|
first: at_span,
|
||||||
|
last: keyword_span,
|
||||||
|
};
|
||||||
abort!(span, "expected body for this `@match`");
|
abort!(span, "expected body for this `@match`");
|
||||||
},
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
ast::Markup::Match { at_span: SpanRange::single_span(at_span), head: head.into_iter().collect(), arms, arms_span }
|
ast::Markup::Match {
|
||||||
|
at_span: SpanRange::single_span(at_span),
|
||||||
|
head: head.into_iter().collect(),
|
||||||
|
arms,
|
||||||
|
arms_span,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn match_arms(&mut self) -> Vec<ast::MatchArm> {
|
fn match_arms(&mut self) -> Vec<ast::MatchArm> {
|
||||||
|
@ -364,16 +387,19 @@ impl Parser {
|
||||||
loop {
|
loop {
|
||||||
match self.peek2() {
|
match self.peek2() {
|
||||||
Some((TokenTree::Punct(ref eq), Some(TokenTree::Punct(ref gt))))
|
Some((TokenTree::Punct(ref eq), Some(TokenTree::Punct(ref gt))))
|
||||||
if eq.as_char() == '=' && gt.as_char() == '>' && eq.spacing() == Spacing::Joint => {
|
if eq.as_char() == '='
|
||||||
|
&& gt.as_char() == '>'
|
||||||
|
&& eq.spacing() == Spacing::Joint =>
|
||||||
|
{
|
||||||
self.advance2();
|
self.advance2();
|
||||||
head.push(TokenTree::Punct(eq.clone()));
|
head.push(TokenTree::Punct(eq.clone()));
|
||||||
head.push(TokenTree::Punct(gt.clone()));
|
head.push(TokenTree::Punct(gt.clone()));
|
||||||
break;
|
break;
|
||||||
},
|
}
|
||||||
Some((token, _)) => {
|
Some((token, _)) => {
|
||||||
self.advance();
|
self.advance();
|
||||||
head.push(token);
|
head.push(token);
|
||||||
},
|
}
|
||||||
None => {
|
None => {
|
||||||
if head.is_empty() {
|
if head.is_empty() {
|
||||||
return None;
|
return None;
|
||||||
|
@ -381,7 +407,7 @@ impl Parser {
|
||||||
let head_span = ast::span_tokens(head);
|
let head_span = ast::span_tokens(head);
|
||||||
abort!(head_span, "unexpected end of @match pattern");
|
abort!(head_span, "unexpected end of @match pattern");
|
||||||
}
|
}
|
||||||
},
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
let body = match self.next() {
|
let body = match self.next() {
|
||||||
|
@ -395,7 +421,7 @@ impl Parser {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
body
|
body
|
||||||
},
|
}
|
||||||
// $pat => $expr
|
// $pat => $expr
|
||||||
Some(first_token) => {
|
Some(first_token) => {
|
||||||
let mut span = SpanRange::single_span(first_token.span());
|
let mut span = SpanRange::single_span(first_token.span());
|
||||||
|
@ -406,18 +432,21 @@ impl Parser {
|
||||||
Some(token) => {
|
Some(token) => {
|
||||||
span.last = token.span();
|
span.last = token.span();
|
||||||
body.push(token);
|
body.push(token);
|
||||||
},
|
}
|
||||||
None => break,
|
None => break,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
self.block(body.into_iter().collect(), span)
|
self.block(body.into_iter().collect(), span)
|
||||||
},
|
}
|
||||||
None => {
|
None => {
|
||||||
let span = ast::span_tokens(head);
|
let span = ast::span_tokens(head);
|
||||||
abort!(span, "unexpected end of @match arm");
|
abort!(span, "unexpected end of @match arm");
|
||||||
},
|
}
|
||||||
};
|
};
|
||||||
Some(ast::MatchArm { head: head.into_iter().collect(), body })
|
Some(ast::MatchArm {
|
||||||
|
head: head.into_iter().collect(),
|
||||||
|
body,
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Parses a `@let` expression.
|
/// Parses a `@let` expression.
|
||||||
|
@ -427,14 +456,12 @@ impl Parser {
|
||||||
let mut tokens = vec![keyword];
|
let mut tokens = vec![keyword];
|
||||||
loop {
|
loop {
|
||||||
match self.next() {
|
match self.next() {
|
||||||
Some(token) => {
|
Some(token) => match token {
|
||||||
match token {
|
|
||||||
TokenTree::Punct(ref punct) if punct.as_char() == '=' => {
|
TokenTree::Punct(ref punct) if punct.as_char() == '=' => {
|
||||||
tokens.push(token.clone());
|
tokens.push(token.clone());
|
||||||
break;
|
break;
|
||||||
},
|
|
||||||
_ => tokens.push(token),
|
|
||||||
}
|
}
|
||||||
|
_ => tokens.push(token),
|
||||||
},
|
},
|
||||||
None => {
|
None => {
|
||||||
let mut span = ast::span_tokens(tokens);
|
let mut span = ast::span_tokens(tokens);
|
||||||
|
@ -445,14 +472,12 @@ impl Parser {
|
||||||
}
|
}
|
||||||
loop {
|
loop {
|
||||||
match self.next() {
|
match self.next() {
|
||||||
Some(token) => {
|
Some(token) => match token {
|
||||||
match token {
|
|
||||||
TokenTree::Punct(ref punct) if punct.as_char() == ';' => {
|
TokenTree::Punct(ref punct) if punct.as_char() == ';' => {
|
||||||
tokens.push(token.clone());
|
tokens.push(token.clone());
|
||||||
break;
|
break;
|
||||||
},
|
|
||||||
_ => tokens.push(token),
|
|
||||||
}
|
}
|
||||||
|
_ => tokens.push(token),
|
||||||
},
|
},
|
||||||
None => {
|
None => {
|
||||||
let mut span = ast::span_tokens(tokens);
|
let mut span = ast::span_tokens(tokens);
|
||||||
|
@ -462,10 +487,13 @@ impl Parser {
|
||||||
"unexpected end of `@let` expression";
|
"unexpected end of `@let` expression";
|
||||||
help = "are you missing a semicolon?"
|
help = "are you missing a semicolon?"
|
||||||
);
|
);
|
||||||
},
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
ast::Markup::Let { at_span: SpanRange::single_span(at_span), tokens: tokens.into_iter().collect() }
|
}
|
||||||
|
ast::Markup::Let {
|
||||||
|
at_span: SpanRange::single_span(at_span),
|
||||||
|
tokens: tokens.into_iter().collect(),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Parses an element node.
|
/// Parses an element node.
|
||||||
|
@ -479,13 +507,15 @@ impl Parser {
|
||||||
let attrs = self.attrs();
|
let attrs = self.attrs();
|
||||||
let body = match self.peek() {
|
let body = match self.peek() {
|
||||||
Some(TokenTree::Punct(ref punct))
|
Some(TokenTree::Punct(ref punct))
|
||||||
if punct.as_char() == ';' || punct.as_char() == '/' => {
|
if punct.as_char() == ';' || punct.as_char() == '/' =>
|
||||||
|
{
|
||||||
// Void element
|
// Void element
|
||||||
self.advance();
|
self.advance();
|
||||||
ast::ElementBody::Void { semi_span: SpanRange::single_span(punct.span()) }
|
ast::ElementBody::Void {
|
||||||
},
|
semi_span: SpanRange::single_span(punct.span()),
|
||||||
_ => {
|
}
|
||||||
match self.markup() {
|
}
|
||||||
|
_ => match self.markup() {
|
||||||
ast::Markup::Block(block) => ast::ElementBody::Block { block },
|
ast::Markup::Block(block) => ast::ElementBody::Block { block },
|
||||||
markup => {
|
markup => {
|
||||||
let markup_span = markup.span();
|
let markup_span = markup.span();
|
||||||
|
@ -494,7 +524,6 @@ impl Parser {
|
||||||
"element body must be wrapped in braces";
|
"element body must be wrapped in braces";
|
||||||
help = "see https://github.com/lambda-fairy/maud/pull/137 for details"
|
help = "see https://github.com/lambda-fairy/maud/pull/137 for details"
|
||||||
);
|
);
|
||||||
},
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
@ -525,7 +554,7 @@ impl Parser {
|
||||||
attr_type: ast::AttrType::Normal { value },
|
attr_type: ast::AttrType::Normal { value },
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
},
|
}
|
||||||
// Empty attribute
|
// Empty attribute
|
||||||
(Some(ref name), Some(TokenTree::Punct(ref punct))) if punct.as_char() == '?' => {
|
(Some(ref name), Some(TokenTree::Punct(ref punct))) if punct.as_char() == '?' => {
|
||||||
self.commit(attempt);
|
self.commit(attempt);
|
||||||
|
@ -536,20 +565,27 @@ impl Parser {
|
||||||
attr_type: ast::AttrType::Empty { toggler },
|
attr_type: ast::AttrType::Empty { toggler },
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
},
|
}
|
||||||
// Class shorthand
|
// Class shorthand
|
||||||
(None, Some(TokenTree::Punct(ref punct))) if punct.as_char() == '.' => {
|
(None, Some(TokenTree::Punct(ref punct))) if punct.as_char() == '.' => {
|
||||||
self.commit(attempt);
|
self.commit(attempt);
|
||||||
let name = self.class_or_id_name();
|
let name = self.class_or_id_name();
|
||||||
let toggler = self.attr_toggler();
|
let toggler = self.attr_toggler();
|
||||||
attrs.push(ast::Attr::Class { dot_span: SpanRange::single_span(punct.span()), name, toggler });
|
attrs.push(ast::Attr::Class {
|
||||||
},
|
dot_span: SpanRange::single_span(punct.span()),
|
||||||
|
name,
|
||||||
|
toggler,
|
||||||
|
});
|
||||||
|
}
|
||||||
// ID shorthand
|
// ID shorthand
|
||||||
(None, Some(TokenTree::Punct(ref punct))) if punct.as_char() == '#' => {
|
(None, Some(TokenTree::Punct(ref punct))) if punct.as_char() == '#' => {
|
||||||
self.commit(attempt);
|
self.commit(attempt);
|
||||||
let name = self.class_or_id_name();
|
let name = self.class_or_id_name();
|
||||||
attrs.push(ast::Attr::Id { hash_span: SpanRange::single_span(punct.span()), name });
|
attrs.push(ast::Attr::Id {
|
||||||
},
|
hash_span: SpanRange::single_span(punct.span()),
|
||||||
|
name,
|
||||||
|
});
|
||||||
|
}
|
||||||
// If it's not a valid attribute, backtrack and bail out
|
// If it's not a valid attribute, backtrack and bail out
|
||||||
_ => break,
|
_ => break,
|
||||||
}
|
}
|
||||||
|
@ -566,11 +602,14 @@ impl Parser {
|
||||||
}
|
}
|
||||||
has_class = true;
|
has_class = true;
|
||||||
"class".to_string()
|
"class".to_string()
|
||||||
},
|
}
|
||||||
ast::Attr::Id { .. } => "id".to_string(),
|
ast::Attr::Id { .. } => "id".to_string(),
|
||||||
ast::Attr::Attribute { attribute } => {
|
ast::Attr::Attribute { attribute } => attribute
|
||||||
attribute.name.clone().into_iter().map(|token| token.to_string()).collect()
|
.name
|
||||||
},
|
.clone()
|
||||||
|
.into_iter()
|
||||||
|
.map(|token| token.to_string())
|
||||||
|
.collect(),
|
||||||
};
|
};
|
||||||
let entry = attr_map.entry(name).or_default();
|
let entry = attr_map.entry(name).or_default();
|
||||||
entry.push(attr.span());
|
entry.push(attr.span());
|
||||||
|
@ -605,7 +644,7 @@ impl Parser {
|
||||||
cond: group.stream(),
|
cond: group.stream(),
|
||||||
cond_span: SpanRange::single_span(group.span()),
|
cond_span: SpanRange::single_span(group.span()),
|
||||||
})
|
})
|
||||||
},
|
}
|
||||||
_ => None,
|
_ => None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -626,12 +665,12 @@ impl Parser {
|
||||||
self.advance();
|
self.advance();
|
||||||
result.push(TokenTree::Punct(punct.clone()));
|
result.push(TokenTree::Punct(punct.clone()));
|
||||||
true
|
true
|
||||||
},
|
}
|
||||||
Some(TokenTree::Ident(ref ident)) if expect_ident => {
|
Some(TokenTree::Ident(ref ident)) if expect_ident => {
|
||||||
self.advance();
|
self.advance();
|
||||||
result.push(TokenTree::Ident(ident.clone()));
|
result.push(TokenTree::Ident(ident.clone()));
|
||||||
false
|
false
|
||||||
},
|
}
|
||||||
_ => break,
|
_ => break,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
@ -655,6 +694,9 @@ impl Parser {
|
||||||
/// Parses the given token stream as a Maud expression.
|
/// Parses the given token stream as a Maud expression.
|
||||||
fn block(&mut self, body: TokenStream, outer_span: SpanRange) -> ast::Block {
|
fn block(&mut self, body: TokenStream, outer_span: SpanRange) -> ast::Block {
|
||||||
let markups = self.with_input(body).markups();
|
let markups = self.with_input(body).markups();
|
||||||
ast::Block { markups, outer_span }
|
ast::Block {
|
||||||
|
markups,
|
||||||
|
outer_span,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
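Not part of the diff above — a minimal usage sketch, assuming maud's documented `html!` syntax, of the template forms these parser branches accept: `@if`/`@else`, `@let`, `@for ... in`, `@match`, the `.class`/`#id` shorthand, parenthesized splices, and `;`-terminated void elements. The `Task` type and `task_list` function are made up for illustration.

use maud::{html, Markup};

// Illustrative data type; not part of the parser code above.
struct Task {
    title: &'static str,
    done: bool,
}

fn task_list(heading: Option<&str>, tasks: &[Task]) -> Markup {
    html! {
        // Class and id shorthand, handled by `attrs` / `class_or_id_name`
        div.tasks #main {
            // `@if` / `@else`, handled by `if_expr` / `else_if_expr`
            @if let Some(heading) = heading {
                h1 { (heading) }
            } @else {
                h1 { "Tasks" }
            }
            // `@let` inside a block, handled by `let_expr`
            @let total = tasks.len();
            p { (total) " tasks" }
            // `@for ... in ...`, handled by `for_expr`
            ul {
                @for task in tasks {
                    // `@match`, handled by `match_expr` / `match_arms`
                    @match task.done {
                        true => { li.done { (task.title) } }
                        false => { li { (task.title) } }
                    }
                }
            }
            // Void element terminated by `;`
            hr;
        }
    }
}

The `@match` arms above are braced on purpose: assuming maud's documented behaviour, an unbraced arm body is collected up to the following comma, so expression-style arms need a trailing comma, while braced bodies do not.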
Loading…
Add table
Reference in a new issue