Run Clippy on docs builder (#319)
parent 982c9f4a03
commit 12cec48134
3 changed files with 12 additions and 11 deletions

.github/workflows/ci.yml (vendored, 4 changed lines)
@@ -53,10 +53,14 @@ jobs:
           toolchain: nightly
           profile: minimal
           override: true
+          components: clippy
 
       - name: Build documentation
         run: cd docs && make -j$(nproc)
 
+      - name: Check Clippy
+        run: cd docs && cargo clippy --all-targets -- -D warnings
+
   examples:
     name: Examples
     runs-on: ubuntu-latest
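
This is the workflow change behind the commit title: the toolchain-installation step now also pulls in the clippy component alongside the nightly toolchain, and a new step runs Clippy over the docs builder after the documentation itself has been built. Passing -D warnings to Clippy promotes every warning to an error, so the job fails on any new lint instead of just printing it. The Rust hunks below are the fixes needed to make that gate pass.
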
@@ -3,18 +3,17 @@ use docs::{
     page::{Page, COMRAK_OPTIONS},
     string_writer::StringWriter,
 };
-use serde_json;
 use std::{env, error::Error, fs, io, path::Path, str};
 
 fn main() -> Result<(), Box<dyn Error>> {
     let args = env::args().collect::<Vec<_>>();
-    if args.len() < 2 || !args[2..].iter().all(|arg| arg.contains(":")) {
+    if args.len() < 2 || !args[2..].iter().all(|arg| arg.contains(':')) {
         return Err("invalid arguments".into());
     }
     let entries = args[2..]
         .iter()
         .map(|arg| {
-            let mut splits = arg.splitn(2, ":");
+            let mut splits = arg.splitn(2, ':');
             let slug = splits.next().unwrap();
             let input_path = splits.next().unwrap();
             (slug, input_path)
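
Both replacements in this hunk are the same fix: Clippy's single_char_pattern lint prefers a char pattern (':') over a one-character string (":") in contains and splitn, on the grounds that the char form is clearer and can avoid the string-pattern machinery. A minimal sketch of the resulting slug:path parsing, where split_slug is a made-up helper standing in for the closure above:

    // Sketch only: the real code unwraps because the arguments were validated up front.
    fn split_slug(arg: &str) -> Option<(&str, &str)> {
        let mut splits = arg.splitn(2, ':'); // char pattern, as Clippy prefers
        Some((splits.next()?, splits.next()?))
    }

    fn main() {
        assert_eq!(split_slug("intro:docs/intro.md"), Some(("intro", "docs/intro.md")));
        assert_eq!(split_slug("no-colon-here"), None);
    }
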
@@ -35,7 +34,7 @@ fn build_nav(entries: &[(&str, &str)], nav_path: &str) -> Result<(), Box<dyn Err
         .collect::<io::Result<Vec<_>>>()?;
 
     // Only write if different to avoid spurious rebuilds
-    let old_string = fs::read_to_string(nav_path).unwrap_or(String::new());
+    let old_string = fs::read_to_string(nav_path).unwrap_or_default();
     let new_string = serde_json::to_string_pretty(&nav)?;
     if old_string != new_string {
         fs::create_dir_all(Path::new(nav_path).parent().unwrap())?;
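
unwrap_or(String::new()) evaluates its argument eagerly and spells out the default by hand; this is presumably Clippy's or_fun_call (or the newer unwrap_or_default) lint, which points at unwrap_or_default() instead, producing String::default() only when the read actually fails. A small sketch of the same read-or-empty idiom, with an example path:

    use std::fs;

    // Fall back to an empty String when the file is missing or unreadable.
    fn read_or_empty(path: &str) -> String {
        fs::read_to_string(path).unwrap_or_default()
    }

    fn main() {
        // A path that presumably does not exist; the result is "" rather than an error.
        assert_eq!(read_or_empty("/this/path/should/not/exist.json"), "");
    }
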
@@ -7,13 +7,11 @@ use docs::{
     page::{Page, COMRAK_OPTIONS},
     views,
 };
-use serde_json;
 use std::{
     env,
     error::Error,
     fs::{self, File},
     io::BufReader,
-    mem,
     path::Path,
     str::{self, Utf8Error},
     string::FromUtf8Error,
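
Two imports disappear here. A bare use serde_json; is redundant in the 2018 edition (the crate is already reachable through Cargo) and is presumably what Clippy's single_component_path_imports lint flagged; the same removal appears in the other binary above. The mem import goes because the remaining call sites below switch to the fully qualified std::mem::take.
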
@@ -90,7 +88,7 @@ fn rewrite_md_links<'a>(root: &'a AstNode<'a>) -> Result<(), FromUtf8Error> {
     for node in root.descendants() {
         let mut data = node.data.borrow_mut();
         if let NodeValue::Link(NodeLink { url, .. }) = &mut data.value {
-            let mut url_string = String::from_utf8(mem::replace(url, Vec::new()))?;
+            let mut url_string = String::from_utf8(std::mem::take(url))?;
            if url_string.ends_with(".md") {
                 url_string.truncate(url_string.len() - ".md".len());
                 url_string.push_str(".html");
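
mem::replace(url, Vec::new()) swaps in an explicitly written default; std::mem::take(url) does the same thing through the type's Default impl, which is what Clippy's mem_replace_with_default lint suggests. A standalone illustration of the equivalence (the variable and its contents are just example values):

    use std::mem;

    fn main() {
        let mut url: Vec<u8> = b"getting-started.md".to_vec();

        // Moves the current contents out and leaves Vec::new() (the Default) behind,
        // exactly what mem::replace(&mut url, Vec::new()) spelled out by hand.
        let taken = mem::take(&mut url);

        assert_eq!(taken, b"getting-started.md".to_vec());
        assert!(url.is_empty());
    }
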
@@ -117,7 +115,7 @@ fn strip_hidden_code<'a>(root: &'a AstNode<'a>) -> Result<(), Box<dyn Error>> {
 
 fn strip_hidden_code_inner(literal: &str) -> String {
     let lines = literal
-        .split("\n")
+        .split('\n')
         .filter(|line| {
             let line = line.trim();
             line != "#" && !line.starts_with("# ")
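
For context, the function touched here implements the rustdoc/mdBook convention that example lines starting with "# " (or a lone "#") are compiled but hidden from the rendered page. A self-contained sketch of that filter, under the assumption that the kept lines are simply rejoined with newlines (the rest of the function is not shown in this diff, and strip_hidden is a stand-in name):

    fn strip_hidden(literal: &str) -> String {
        literal
            .split('\n') // char pattern here too, matching the Clippy fix above
            .filter(|line| {
                let line = line.trim();
                line != "#" && !line.starts_with("# ")
            })
            .collect::<Vec<_>>()
            .join("\n")
    }

    fn main() {
        let block = "# use std::error::Error;\nfn main() {\n    println!(\"hidden lines gone\");\n}\n";
        assert_eq!(strip_hidden(block), "fn main() {\n    println!(\"hidden lines gone\");\n}\n");
    }
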
@@ -142,10 +140,10 @@ fn highlight_code<'a>(root: &'a AstNode<'a>) -> Result<(), Box<dyn Error>> {
             let info = parse_code_block_info(info)?;
             let syntax = info
                 .into_iter()
-                .filter_map(|token| ss.find_syntax_by_token(&token))
+                .filter_map(|token| ss.find_syntax_by_token(token))
                 .next()
                 .unwrap_or_else(|| ss.find_syntax_plain_text());
-            let mut literal = String::from_utf8(mem::replace(literal, Vec::new()))?;
+            let mut literal = String::from_utf8(std::mem::take(literal))?;
             if !literal.ends_with('\n') {
                 // Syntect expects a trailing newline
                 literal.push('\n');
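
Dropping the & in find_syntax_by_token(&token) is presumably Clippy's needless_borrow fix: token already comes out of the iterator as a &str, so borrowing it again only produces a &&str that has to be auto-dereferenced. A sketch of the lookup chain, with a hypothetical lookup function standing in for syntect's SyntaxSet::find_syntax_by_token:

    // `lookup` is a made-up stand-in for ss.find_syntax_by_token in the diff.
    fn lookup(token: &str) -> Option<&'static str> {
        if token == "rust" { Some("Rust grammar") } else { None }
    }

    fn main() {
        let info: Vec<&str> = vec!["not-a-language", "rust"];
        let syntax = info
            .into_iter()
            .filter_map(|token| lookup(token)) // token is already &str; &token would be a needless borrow
            .next()
            .unwrap_or("plain text");
        assert_eq!(syntax, "Rust grammar");
    }
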
@@ -160,5 +158,5 @@ fn highlight_code<'a>(root: &'a AstNode<'a>) -> Result<(), Box<dyn Error>> {
 }
 
 fn parse_code_block_info(info: &[u8]) -> Result<Vec<&str>, Utf8Error> {
-    str::from_utf8(info).map(|info| info.split(",").map(str::trim).collect())
+    str::from_utf8(info).map(|info| info.split(',').map(str::trim).collect())
 }