diff --git a/src/build.rs b/src/build.rs
index 5f7ef86..5dec199 100644
--- a/src/build.rs
+++ b/src/build.rs
@@ -1,7 +1,121 @@
 use std::fs;
+use std::fs::File;
+use std::io;
+use std::io::Write;
+use std::path::Path;
+use std::process::Command;
+use camino::Utf8Path;
 
-pub fn build_styles() {
-    let css = grass::from_path("styles/styles.scss", &grass::Options::default()).unwrap();
-    fs::write("dist/styles.css", css).unwrap();
+use crate::pipeline::{AssetKind, Output, OutputKind, Sack, Virtual};
+use crate::BuildContext;
+
+pub(crate) fn clean_dist() {
+    println!("Cleaning dist");
+    if fs::metadata("dist").is_ok() {
+        fs::remove_dir_all("dist").unwrap();
+    }
+    fs::create_dir("dist").unwrap();
+}
+
+pub(crate) fn build_styles() {
+    let css = grass::from_path("styles/styles.scss", &grass::Options::default()).unwrap();
+    fs::write("dist/styles.css", css).unwrap();
+}
+
+pub(crate) fn build_content(ctx: &BuildContext, content: &[&Output]) {
+    let now = std::time::Instant::now();
+    render_all(ctx, content);
+    println!("Elapsed: {:.2?}", now.elapsed());
+}
+
+pub(crate) fn build_static() {
+    copy_recursively(std::path::Path::new("public"), std::path::Path::new("dist")).unwrap();
+}
+
+pub(crate) fn build_pagefind() {
+    let res = Command::new("pagefind")
+        .args(["--site", "dist"])
+        .output()
+        .unwrap();
+
+    println!("{}", String::from_utf8(res.stdout).unwrap());
+}
+
+pub(crate) fn build_js() {
+    let res = Command::new("esbuild")
+        .arg("js/vanilla/reveal.js")
+        .arg("js/vanilla/photos.ts")
+        .arg("js/search/dist/search.js")
+        .arg("--format=esm")
+        .arg("--bundle")
+        .arg("--splitting")
+        .arg("--minify")
+        .arg("--outdir=dist/js/")
+        .output()
+        .unwrap();
+
+    println!("{}", String::from_utf8(res.stderr).unwrap());
+}
+
+fn copy_recursively(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> io::Result<()> {
+    fs::create_dir_all(&dst)?;
+    for entry in fs::read_dir(src)? {
+        let entry = entry?;
+        let filetype = entry.file_type()?;
+        if filetype.is_dir() {
+            copy_recursively(entry.path(), dst.as_ref().join(entry.file_name()))?;
+        } else {
+            fs::copy(entry.path(), dst.as_ref().join(entry.file_name()))?;
+        }
+    }
+    Ok(())
+}
+
+fn render_all(ctx: &BuildContext, items: &[&Output]) {
+    for item in items {
+        let file = match &item.kind {
+            OutputKind::Asset(a) => Some(&a.meta.path),
+            OutputKind::Virtual(_) => None,
+        };
+        render(
+            item,
+            Sack {
+                ctx,
+                hole: items,
+                path: &item.path,
+                file,
+            },
+        );
+    }
+}
+
+fn render(item: &Output, sack: Sack) {
+    let o = Utf8Path::new("dist").join(&item.path);
+    fs::create_dir_all(o.parent().unwrap()).unwrap();
+
+    match item.kind {
+        OutputKind::Asset(ref real) => {
+            let i = &real.meta.path;
+
+            match &real.kind {
+                AssetKind::Html(closure) => {
+                    let mut file = File::create(&o).unwrap();
+                    file.write_all(closure(&sack).as_bytes()).unwrap();
+                    println!("HTML: {} -> {}", i, o);
+                }
+                AssetKind::Bibtex(_) => {}
+                AssetKind::Image => {
+                    fs::create_dir_all(o.parent().unwrap()).unwrap();
+                    fs::copy(i, &o).unwrap();
+                    println!("Image: {} -> {}", i, o);
+                }
+            };
+        }
+        OutputKind::Virtual(Virtual(ref closure)) => {
+            let mut file = File::create(&o).unwrap();
+            file.write_all(closure(&sack).as_bytes()).unwrap();
+            println!("Virtual: -> {}", o);
+        }
+    }
 }
diff --git a/src/main.rs b/src/main.rs
index 5373a2c..edfa589 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -19,7 +19,6 @@ use hypertext::{Raw, Renderable};
 use pipeline::{Asset, AssetKind, Content, FileItemKind, Output, PipelineItem};
 use serde::Deserialize;
 
-use crate::build::build_styles;
 use crate::pipeline::Virtual;
 
 #[derive(Parser, Debug, Clone)]
@@ -83,21 +82,97 @@ fn main() {
         .into(),
     };
 
+    let sources = &[
+        Source {
+            path: "content/about.md",
+            exts: ["md"].into(),
+            func: process_content::<crate::html::Post>,
+        },
+        Source {
+            path: "content/posts/**/*",
+            exts: ["md", "mdx"].into(),
+            func: process_content::<crate::html::Post>,
+        },
+        Source {
+            path: "content/slides/**/*",
+            exts: ["md", "lhs"].into(),
+            func: process_content::<crate::html::Slideshow>,
+        },
+        Source {
+            path: "content/wiki/**/*",
+            exts: ["md"].into(),
+            func: process_content::<crate::html::Wiki>,
+        },
+    ];
+
+    let special = &[
+        Output {
+            kind: Virtual::new(|sack| crate::html::map(sack).render().to_owned().into()).into(),
+            path: "map/index.html".into(),
+            link: None,
+        },
+        Output {
+            kind: Virtual::new(|sack| crate::html::search(sack).render().to_owned().into()).into(),
+            path: "search/index.html".into(),
+            link: None,
+        },
+        Output {
+            kind: Asset {
+                kind: pipeline::AssetKind::html(|sack| {
+                    let data = std::fs::read_to_string("content/index.md").unwrap();
+                    let (_, html, _) = text::md::parse(data, None);
+                    crate::html::home(sack, Raw(html))
+                        .render()
+                        .to_owned()
+                        .into()
+                }),
+                meta: pipeline::FileItem {
+                    kind: pipeline::FileItemKind::Index,
+                    path: "content/index.md".into(),
+                },
+            }
+            .into(),
+            path: "index.html".into(),
+            link: None,
+        },
+        Output {
+            kind: Virtual::new(|sack| {
+                crate::html::to_list(sack, sack.get_links("posts/**/*.html"), "Posts".into())
+            })
+            .into(),
+            path: "posts/index.html".into(),
+            link: None,
+        },
+        Output {
+            kind: Virtual::new(|sack| {
+                crate::html::to_list(
+                    sack,
+                    sack.get_links("slides/**/*.html"),
+                    "Slideshows".into(),
+                )
+            })
+            .into(),
+            path: "slides/index.html".into(),
+            link: None,
+        },
+    ];
+
     match args.mode {
         Mode::Build => {
-            build(&ctx);
+            build(&ctx, sources, special);
         }
         Mode::Watch => {
-            build(&ctx);
-            watch::watch().unwrap()
+            build(&ctx, sources, special);
+            watch::watch(&ctx, sources).unwrap()
         }
     }
 }
+#[derive(Debug)]
 struct Source {
-    path: &'static str,
-    exts: HashSet<&'static str>,
-    func: fn(PipelineItem) -> PipelineItem,
+    pub path: &'static str,
+    pub exts: HashSet<&'static str>,
+    pub func: fn(PipelineItem) -> PipelineItem,
 }
 
 impl Source {
@@ -107,155 +182,57 @@ impl Source {
             .map(self.func)
             .collect()
     }
+
+    fn get_maybe(&self, path: &Utf8Path) -> Option<PipelineItem> {
+        let pattern = glob::Pattern::new(self.path).expect("Bad pattern");
+        if !pattern.matches_path(path.as_std_path()) {
+            return None;
+        };
+
+        let item = match path.is_file() {
+            true => Some(crate::pipeline::to_source(path.to_owned(), &self.exts)),
+            false => None,
+        };
+
+        item.map(Into::into).map(self.func)
+    }
 }
 
-fn build(ctx: &BuildContext) {
-    if fs::metadata("dist").is_ok() {
-        println!("Cleaning dist");
-        fs::remove_dir_all("dist").unwrap();
-    }
+fn build(ctx: &BuildContext, sources: &[Source], special: &[Output]) {
+    crate::build::clean_dist();
 
-    fs::create_dir("dist").unwrap();
-
-    let sources = vec![
-        Source {
-            path: "content/about.md",
-            exts: ["md"].into(),
-            func: as_index::<crate::html::Post>,
-        },
-        Source {
-            path: "content/posts/**/*",
-            exts: ["md", "mdx"].into(),
-            func: as_index::<crate::html::Post>,
-        },
-        Source {
-            path: "content/slides/**/*",
-            exts: ["md", "lhs"].into(),
-            func: as_index::<crate::html::Slideshow>,
-        },
-        Source {
-            path: "content/wiki/**/*",
-            exts: ["md"].into(),
-            func: as_index::<crate::html::Wiki>,
-        },
-    ];
-
-    let assets: Vec<Output> = sources
+    let sources: Vec<_> = sources
         .iter()
         .flat_map(Source::get)
         .map(to_bundle)
-        .filter_map(|item| match item {
-            PipelineItem::Skip(skip) => {
-                println!("Skipping {}", skip.path);
-                None
-            }
-            PipelineItem::Take(take) => Some(take),
-        })
+        .filter_map(Option::from)
         .collect();
 
-    let assets: Vec<Output> = vec![
-        assets,
-        vec![
-            Output {
-                kind: Virtual::new(|sack| crate::html::map(sack).render().to_owned().into()).into(),
-                path: "map/index.html".into(),
-                link: None,
-            },
-            Output {
-                kind: Virtual::new(|sack| crate::html::search(sack).render().to_owned().into())
-                    .into(),
-                path: "search/index.html".into(),
-                link: None,
-            },
-            Output {
-                kind: Asset {
-                    kind: pipeline::AssetKind::html(|sack| {
-                        let data = std::fs::read_to_string("content/index.md").unwrap();
-                        let (_, html, _) = text::md::parse(data, None);
-                        crate::html::home(sack, Raw(html))
-                            .render()
-                            .to_owned()
-                            .into()
-                    }),
-                    meta: pipeline::FileItem {
-                        kind: pipeline::FileItemKind::Index,
-                        path: "content/index.md".into(),
-                    },
-                }
-                .into(),
-                path: "index.html".into(),
-                link: None,
-            },
-            Output {
-                kind: Virtual::new(|sack| {
-                    crate::html::to_list(sack, sack.get_links("posts/**/*.html"), "Posts".into())
-                })
-                .into(),
-                path: "posts/index.html".into(),
-                link: None,
-            },
-            Output {
-                kind: Virtual::new(|sack| {
-                    crate::html::to_list(sack, sack.get_links("slides/**/*.html"), "Slideshows".into())
-                })
-                .into(),
-                path: "slides/index.html".into(),
-                link: None,
-            },
-        ],
-    ]
-    .into_iter()
-    .flatten()
-    .collect();
+    let assets: Vec<_> = sources.iter().chain(special).collect();
 
-    {
-        let now = std::time::Instant::now();
-        pipeline::render_all(ctx, &assets);
-        println!("Elapsed: {:.2?}", now.elapsed());
-    }
-
-    utils::copy_recursively(std::path::Path::new("public"), std::path::Path::new("dist")).unwrap();
-
-    build_styles();
-
-    let res = Command::new("pagefind")
-        .args(["--site", "dist"])
-        .output()
-        .unwrap();
-
-    println!("{}", String::from_utf8(res.stdout).unwrap());
-
-    let res = Command::new("esbuild")
-        .arg("js/vanilla/reveal.js")
-        .arg("js/vanilla/photos.ts")
-        .arg("js/search/dist/search.js")
-        .arg("--format=esm")
-        .arg("--bundle")
.arg("--splitting") - .arg("--minify") - .arg("--outdir=dist/js/") - .output() - .unwrap(); - - println!("{}", String::from_utf8(res.stderr).unwrap()); + crate::build::build_content(ctx, &assets); + crate::build::build_static(); + crate::build::build_styles(); + crate::build::build_pagefind(); + crate::build::build_js(); } -pub fn parse_frontmatter(raw: &str) -> (T, String) +pub fn parse_frontmatter(raw: &str) -> (D, String) where - T: for<'de> Deserialize<'de>, + D: for<'de> Deserialize<'de>, { - let matter = Matter::::new(); - let result = matter.parse(raw); + let parser = Matter::::new(); + let result = parser.parse_with_struct::(raw).unwrap(); ( // Just the front matter - result.data.unwrap().deserialize::().unwrap(), + result.data, // The rest of the content result.content, ) } -fn as_index(item: PipelineItem) -> PipelineItem +fn process_content(item: PipelineItem) -> PipelineItem where T: for<'de> Deserialize<'de> + Content + Clone + 'static, { @@ -273,16 +250,16 @@ where match meta.path.extension() { Some("md" | "mdx" | "lhs") => { - let data = fs::read_to_string(&meta.path).unwrap(); - let (fm, md) = parse_frontmatter::(&data); - let link = T::as_link(&fm, Utf8Path::new("/").join(dir)); + let raw = fs::read_to_string(&meta.path).unwrap(); + let (matter, parsed) = parse_frontmatter::(&raw); + let link = T::as_link(&matter, Utf8Path::new("/").join(dir)); Output { kind: Asset { kind: pipeline::AssetKind::html(move |sack| { let lib = sack.get_library(); - let (outline, parsed, bib) = T::parse(md.clone(), lib); - T::render(fm.clone(), sack, Raw(parsed), outline, bib) + let (outline, parsed, bib) = T::parse(parsed.clone(), lib); + T::render(matter.clone(), sack, Raw(parsed), outline, bib) .render() .into() }), diff --git a/src/pipeline.rs b/src/pipeline.rs index 58ea7d2..c237c21 100644 --- a/src/pipeline.rs +++ b/src/pipeline.rs @@ -2,8 +2,8 @@ //! loading the data from hard drive, and then processing it further depending on the file type. use std::collections::{HashMap, HashSet}; -use std::fs::{self, File}; -use std::io::Write; +use std::fmt::Debug; +use std::usize; use camino::{Utf8Path, Utf8PathBuf}; use glob::glob; @@ -68,6 +68,20 @@ pub(crate) enum AssetKind { Image, } +impl Debug for AssetKind { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Html(fun) => { + // rust mental gymnastics moment + let ptr = &**fun as *const dyn Fn(&Sack) -> String as *const () as usize; + f.debug_tuple("Html").field(&ptr).finish() + }, + Self::Bibtex(b) => f.debug_tuple("Bibtex").field(b).finish(), + Self::Image => write!(f, "Image"), + } + } +} + impl AssetKind { pub fn html(f: impl Fn(&Sack) -> String + 'static) -> Self { Self::Html(Box::new(f)) @@ -75,6 +89,7 @@ impl AssetKind { } /// Asset corresponding to a file on disk. +#[derive(Debug)] pub(crate) struct Asset { /// The kind of a processed asset. pub kind: AssetKind, @@ -84,7 +99,7 @@ pub(crate) struct Asset { /// Dynamically generated asset not corresponding to any file on disk. This is useful when the /// generated page is not a content page, e.g. page list. 
-pub(crate) struct Virtual(Box<dyn Fn(&Sack) -> String>);
+pub(crate) struct Virtual(pub Box<dyn Fn(&Sack) -> String>);
 
 impl Virtual {
     pub fn new(call: impl Fn(&Sack) -> String + 'static) -> Self {
@@ -92,7 +107,16 @@ impl Virtual {
     }
 }
 
+impl Debug for Virtual {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        // rust mental gymnastics moment
+        let ptr = &*self.0 as *const dyn Fn(&Sack) -> String as *const () as usize;
+        f.debug_tuple("Virtual").field(&ptr).finish()
+    }
+}
+
 /// The kind of an output item.
+#[derive(Debug)]
 pub(crate) enum OutputKind {
     /// Marks an output item which corresponds to a file on disk.
     Asset(Asset),
@@ -113,6 +137,7 @@ impl From<Virtual> for OutputKind {
 }
 
 /// Renderable output
+#[derive(Debug)]
 pub(crate) struct Output {
     /// The kind of an output item
     pub(crate) kind: OutputKind,
@@ -124,6 +149,7 @@ pub(crate) struct Output {
 
 /// Items currently in the pipeline. In order for an item to be rendered, it needs to be marked as
 /// `Take`, which means it needs to have an output location assigned to itself.
+#[derive(Debug)]
 pub(crate) enum PipelineItem {
     /// Unclaimed file.
     Skip(FileItem),
@@ -143,16 +169,25 @@ impl From<Output> for PipelineItem {
     }
 }
 
+impl From<PipelineItem> for Option<Output> {
+    fn from(value: PipelineItem) -> Self {
+        match value {
+            PipelineItem::Skip(_) => None,
+            PipelineItem::Take(e) => Some(e),
+        }
+    }
+}
+
 /// This struct allows for querying the website hierarchy. It is passed to each rendered website
 /// page, so that it can easily access the website metadata.
 pub(crate) struct Sack<'a> {
     pub ctx: &'a BuildContext,
     /// Literally all of the content
-    hole: &'a [Output],
+    pub hole: &'a [&'a Output],
     /// Current path for the page being rendered
-    path: &'a Utf8PathBuf,
+    pub path: &'a Utf8PathBuf,
     /// Original file location for this page
-    file: Option<&'a Utf8PathBuf>,
+    pub file: Option<&'a Utf8PathBuf>,
 }
 
 impl<'a> Sack<'a> {
@@ -254,7 +289,7 @@ pub fn gather(pattern: &str, exts: &HashSet<&'static str>) -> Vec<FileItem> {
         .collect()
 }
 
-fn to_source(path: Utf8PathBuf, exts: &HashSet<&'static str>) -> FileItem {
+pub(crate) fn to_source(path: Utf8PathBuf, exts: &HashSet<&'static str>) -> FileItem {
     let hit = path.extension().map_or(false, |ext| exts.contains(ext));
 
     let kind = match hit {
@@ -264,51 +299,3 @@ fn to_source(path: Utf8PathBuf, exts: &HashSet<&'static str>) -> FileItem {
 
     FileItem { kind, path }
 }
-
-pub fn render_all(ctx: &BuildContext, items: &[Output]) {
-    for item in items {
-        let file = match &item.kind {
-            OutputKind::Asset(a) => Some(&a.meta.path),
-            OutputKind::Virtual(_) => None,
-        };
-        render(
-            item,
-            Sack {
-                ctx,
-                hole: items,
-                path: &item.path,
-                file,
-            },
-        );
-    }
-}
-
-fn render(item: &Output, sack: Sack) {
-    let o = Utf8Path::new("dist").join(&item.path);
-    fs::create_dir_all(o.parent().unwrap()).unwrap();
-
-    match item.kind {
-        OutputKind::Asset(ref real) => {
-            let i = &real.meta.path;
-
-            match &real.kind {
-                AssetKind::Html(closure) => {
-                    let mut file = File::create(&o).unwrap();
-                    file.write_all(closure(&sack).as_bytes()).unwrap();
-                    println!("HTML: {} -> {}", i, o);
-                }
-                AssetKind::Bibtex(_) => {}
-                AssetKind::Image => {
-                    fs::create_dir_all(o.parent().unwrap()).unwrap();
-                    fs::copy(i, &o).unwrap();
-                    println!("Image: {} -> {}", i, o);
-                }
-            };
-        }
-        OutputKind::Virtual(Virtual(ref closure)) => {
-            let mut file = File::create(&o).unwrap();
-            file.write_all(closure(&sack).as_bytes()).unwrap();
-            println!("Virtual: -> {}", o);
-        }
-    }
-}
diff --git a/src/utils.rs b/src/utils.rs
index bd3d78c..e69de29 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -1,18 +0,0 @@
-use std::fs;
-use std::io;
-use std::path::Path;
-
-
-pub fn copy_recursively(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> io::Result<()> {
-    fs::create_dir_all(&dst)?;
-    for entry in fs::read_dir(src)? {
-        let entry = entry?;
-        let filetype = entry.file_type()?;
-        if filetype.is_dir() {
-            copy_recursively(entry.path(), dst.as_ref().join(entry.file_name()))?;
-        } else {
-            fs::copy(entry.path(), dst.as_ref().join(entry.file_name()))?;
-        }
-    }
-    Ok(())
-}
diff --git a/src/watch.rs b/src/watch.rs
index 484a954..19ce121 100644
--- a/src/watch.rs
+++ b/src/watch.rs
@@ -1,3 +1,4 @@
+use std::env;
 use std::io::Result;
 use std::net::{TcpListener, TcpStream};
 use std::path::Path;
@@ -6,89 +7,110 @@
 use std::sync::{Arc, Mutex};
 use std::thread::JoinHandle;
 use std::time::Duration;
 
+use camino::Utf8PathBuf;
 use notify::RecursiveMode;
 use notify_debouncer_mini::new_debouncer;
 use tungstenite::WebSocket;
 
-use crate::build::build_styles;
-
+use crate::build::{build_content, build_styles};
+use crate::pipeline::Output;
+use crate::{BuildContext, Source};
 
 fn new_thread_ws_incoming(
-	server: TcpListener,
-	client: Arc<Mutex<Vec<WebSocket<TcpStream>>>>,
+    server: TcpListener,
+    client: Arc<Mutex<Vec<WebSocket<TcpStream>>>>,
 ) -> JoinHandle<()> {
-	std::thread::spawn(move || {
-		for stream in server.incoming() {
-			let socket = tungstenite::accept(stream.unwrap()).unwrap();
-			client.lock().unwrap().push(socket);
-		}
-	})
+    std::thread::spawn(move || {
+        for stream in server.incoming() {
+            let socket = tungstenite::accept(stream.unwrap()).unwrap();
+            client.lock().unwrap().push(socket);
+        }
+    })
 }
 
 fn new_thread_ws_reload(
-	client: Arc<Mutex<Vec<WebSocket<TcpStream>>>>,
+    client: Arc<Mutex<Vec<WebSocket<TcpStream>>>>,
 ) -> (Sender<()>, JoinHandle<()>) {
-	let (tx, rx) = std::sync::mpsc::channel();
+    let (tx, rx) = std::sync::mpsc::channel();
 
-	let thread = std::thread::spawn(move || {
-		while rx.recv().is_ok() {
-			let mut clients = client.lock().unwrap();
-			let mut broken = vec![];
+    let thread = std::thread::spawn(move || {
+        while rx.recv().is_ok() {
+            let mut clients = client.lock().unwrap();
+            let mut broken = vec![];
 
-			for (i, socket) in clients.iter_mut().enumerate() {
-				match socket.send("reload".into()) {
-					Ok(_) => {}
-					Err(tungstenite::error::Error::Io(e)) => {
-						if e.kind() == std::io::ErrorKind::BrokenPipe {
-							broken.push(i);
-						}
-					}
-					Err(e) => {
-						eprintln!("Error: {:?}", e);
-					}
-				}
-			}
+            for (i, socket) in clients.iter_mut().enumerate() {
+                match socket.send("reload".into()) {
+                    Ok(_) => {}
+                    Err(tungstenite::error::Error::Io(e)) => {
+                        if e.kind() == std::io::ErrorKind::BrokenPipe {
+                            broken.push(i);
+                        }
+                    }
+                    Err(e) => {
+                        eprintln!("Error: {:?}", e);
+                    }
+                }
+            }
 
-			for i in broken.into_iter().rev() {
-				clients.remove(i);
-			}
+            for i in broken.into_iter().rev() {
+                clients.remove(i);
+            }
 
-			// Close all but the last 10 connections
-			let len = clients.len();
-			if len > 10 {
-				for mut socket in clients.drain(0..len - 10) {
-					socket.close(None).ok();
-				}
-			}
-		}
-	});
+            // Close all but the last 10 connections
+            let len = clients.len();
+            if len > 10 {
+                for mut socket in clients.drain(0..len - 10) {
+                    socket.close(None).ok();
+                }
+            }
+        }
+    });
 
-	(tx, thread)
+    (tx, thread)
 }
 
+pub fn watch(ctx: &BuildContext, sources: &[Source]) -> Result<()> {
+    let root = env::current_dir().unwrap();
+    let server = TcpListener::bind("127.0.0.1:1337")?;
+    let client = Arc::new(Mutex::new(vec![]));
-pub fn watch() -> Result<()> {
-	let server = TcpListener::bind("127.0.0.1:1337")?;
-	let client = Arc::new(Mutex::new(vec![]));
 
+    let (tx, rx) = std::sync::mpsc::channel();
+    let mut debouncer = new_debouncer(Duration::from_millis(250), tx).unwrap();
-	let (tx, rx) = std::sync::mpsc::channel();
-	let mut debouncer = new_debouncer(Duration::from_secs(2), tx).unwrap();
 
+    debouncer
+        .watcher()
+        .watch(Path::new("styles"), RecursiveMode::Recursive)
+        .unwrap();
-	debouncer
-		.watcher()
-		.watch(Path::new("./styles"), RecursiveMode::Recursive)
-		.unwrap();
 
+    debouncer
+        .watcher()
+        .watch(Path::new("content"), RecursiveMode::Recursive)
+        .unwrap();
 
-	let thread_i = new_thread_ws_incoming(server, client.clone());
-	let (tx_reload, thread_o) = new_thread_ws_reload(client.clone());
+    let thread_i = new_thread_ws_incoming(server, client.clone());
+    let (tx_reload, thread_o) = new_thread_ws_reload(client.clone());
 
-	while let Ok(_ev) = rx.recv().unwrap() {
-		build_styles();
-		tx_reload.send(()).unwrap();
-	}
+    while let Ok(events) = rx.recv().unwrap() {
+        let items = events
+            .into_iter()
+            .filter_map(|event| {
+                Utf8PathBuf::from_path_buf(event.path)
+                    .ok()
+                    .and_then(|path| path.strip_prefix(&root).ok().map(ToOwned::to_owned))
+                    .and_then(|path| sources.iter().find_map(|s| s.get_maybe(&path)))
+            })
+            .filter_map(Option::from)
+            .collect::<Vec<_>>();
 
-	thread_i.join().unwrap();
-	thread_o.join().unwrap();
+        let items = items.iter().collect::<Vec<_>>();
 
-	Ok(())
+        build_content(ctx, &items);
+        build_styles();
+        tx_reload.send(()).unwrap();
+    }
+
+    thread_i.join().unwrap();
+    thread_o.join().unwrap();
+
+    Ok(())
 }
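
Illustrative sketch, not part of the patch: the incremental rebuild in src/watch.rs hinges on Source::get_maybe, which accepts a changed path only when it matches the source's glob pattern before handing the result back to the pipeline. Below is that matching step in isolation, using only the glob crate; SourceRule and the sample paths are hypothetical stand-ins for the crate-private types.

use std::collections::HashSet;
use std::path::Path;

// Hypothetical stand-in for `Source`: a glob pattern plus accepted extensions.
struct SourceRule {
    pattern: &'static str,
    exts: HashSet<&'static str>,
}

impl SourceRule {
    // True when a changed file should trigger a rebuild: the glob check mirrors
    // `Source::get_maybe`, the extension check mirrors the test in `to_source`.
    fn matches(&self, path: &Path) -> bool {
        let pattern = glob::Pattern::new(self.pattern).expect("Bad pattern");
        pattern.matches_path(path)
            && path
                .extension()
                .and_then(|ext| ext.to_str())
                .map_or(false, |ext| self.exts.contains(ext))
    }
}

fn main() {
    let posts = SourceRule {
        pattern: "content/posts/**/*",
        exts: ["md", "mdx"].into(),
    };
    assert!(posts.matches(Path::new("content/posts/2024/hello.md")));
    assert!(!posts.matches(Path::new("content/posts/2024/cover.jpg")));
}

Note that the sketch collapses two steps: in the patch itself the glob match gates the file, and to_source then classifies it by extension rather than rejecting it outright.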