feat: reload on content

Maciej Jur 2024-07-06 01:53:28 +02:00
parent 1c2e87aa25
commit 518f9a56ed
Signed by: kamov
GPG key ID: 191CBFF5F72ECAFD
5 changed files with 358 additions and 276 deletions

View file

@@ -1,7 +1,121 @@
 use std::fs;
+use std::fs::File;
+use std::io;
+use std::io::Write;
+use std::path::Path;
+use std::process::Command;
+
+use camino::Utf8Path;
+
+use crate::pipeline::{AssetKind, Output, OutputKind, Sack, Virtual};
+use crate::BuildContext;
+
+pub(crate) fn clean_dist() {
+    println!("Cleaning dist");
+    if fs::metadata("dist").is_ok() {
+        fs::remove_dir_all("dist").unwrap();
+    }
+    fs::create_dir("dist").unwrap();
+}
 
-pub fn build_styles() {
+pub(crate) fn build_styles() {
     let css = grass::from_path("styles/styles.scss", &grass::Options::default()).unwrap();
     fs::write("dist/styles.css", css).unwrap();
 }
+
+pub(crate) fn build_content(ctx: &BuildContext, content: &[&Output]) {
+    let now = std::time::Instant::now();
+    render_all(ctx, content);
+    println!("Elapsed: {:.2?}", now.elapsed());
+}
+
+pub(crate) fn build_static() {
+    copy_recursively(std::path::Path::new("public"), std::path::Path::new("dist")).unwrap();
+}
+
+pub(crate) fn build_pagefind() {
+    let res = Command::new("pagefind")
+        .args(["--site", "dist"])
+        .output()
+        .unwrap();
+    println!("{}", String::from_utf8(res.stdout).unwrap());
+}
+
+pub(crate) fn build_js() {
+    let res = Command::new("esbuild")
+        .arg("js/vanilla/reveal.js")
+        .arg("js/vanilla/photos.ts")
+        .arg("js/search/dist/search.js")
+        .arg("--format=esm")
+        .arg("--bundle")
+        .arg("--splitting")
+        .arg("--minify")
+        .arg("--outdir=dist/js/")
+        .output()
+        .unwrap();
+    println!("{}", String::from_utf8(res.stderr).unwrap());
+}
+
+fn copy_recursively(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> io::Result<()> {
+    fs::create_dir_all(&dst)?;
+    for entry in fs::read_dir(src)? {
+        let entry = entry?;
+        let filetype = entry.file_type()?;
+        if filetype.is_dir() {
+            copy_recursively(entry.path(), dst.as_ref().join(entry.file_name()))?;
+        } else {
+            fs::copy(entry.path(), dst.as_ref().join(entry.file_name()))?;
+        }
+    }
+    Ok(())
+}
+
+fn render_all(ctx: &BuildContext, items: &[&Output]) {
+    for item in items {
+        let file = match &item.kind {
+            OutputKind::Asset(a) => Some(&a.meta.path),
+            OutputKind::Virtual(_) => None,
+        };
+
+        render(
+            item,
+            Sack {
+                ctx,
+                hole: items,
+                path: &item.path,
+                file,
+            },
+        );
+    }
+}
+
+fn render(item: &Output, sack: Sack) {
+    let o = Utf8Path::new("dist").join(&item.path);
+    fs::create_dir_all(o.parent().unwrap()).unwrap();
+
+    match item.kind {
+        OutputKind::Asset(ref real) => {
+            let i = &real.meta.path;
+
+            match &real.kind {
+                AssetKind::Html(closure) => {
+                    let mut file = File::create(&o).unwrap();
+                    file.write_all(closure(&sack).as_bytes()).unwrap();
+                    println!("HTML: {} -> {}", i, o);
+                }
+                AssetKind::Bibtex(_) => {}
+                AssetKind::Image => {
+                    fs::create_dir_all(o.parent().unwrap()).unwrap();
+                    fs::copy(i, &o).unwrap();
+                    println!("Image: {} -> {}", i, o);
+                }
+            };
+        }
+        OutputKind::Virtual(Virtual(ref closure)) => {
+            let mut file = File::create(&o).unwrap();
+            file.write_all(closure(&sack).as_bytes()).unwrap();
+            println!("Virtual: -> {}", o);
+        }
+    }
+}

View file

@@ -19,7 +19,6 @@ use hypertext::{Raw, Renderable};
 use pipeline::{Asset, AssetKind, Content, FileItemKind, Output, PipelineItem};
 use serde::Deserialize;
 
-use crate::build::build_styles;
 use crate::pipeline::Virtual;
 
 #[derive(Parser, Debug, Clone)]
@@ -83,87 +82,37 @@ fn main() {
         .into(),
     };
 
-    match args.mode {
-        Mode::Build => {
-            build(&ctx);
-        }
-        Mode::Watch => {
-            build(&ctx);
-            watch::watch().unwrap()
-        }
-    }
-}
-
-struct Source {
-    path: &'static str,
-    exts: HashSet<&'static str>,
-    func: fn(PipelineItem) -> PipelineItem,
-}
-
-impl Source {
-    fn get(&self) -> Vec<PipelineItem> {
-        pipeline::gather(self.path, &self.exts)
-            .into_iter()
-            .map(self.func)
-            .collect()
-    }
-}
-
-fn build(ctx: &BuildContext) {
-    if fs::metadata("dist").is_ok() {
-        println!("Cleaning dist");
-        fs::remove_dir_all("dist").unwrap();
-    }
-    fs::create_dir("dist").unwrap();
-
-    let sources = vec![
+    let sources = &[
         Source {
             path: "content/about.md",
             exts: ["md"].into(),
-            func: as_index::<crate::html::Post>,
+            func: process_content::<crate::html::Post>,
         },
         Source {
             path: "content/posts/**/*",
             exts: ["md", "mdx"].into(),
-            func: as_index::<crate::html::Post>,
+            func: process_content::<crate::html::Post>,
         },
         Source {
             path: "content/slides/**/*",
             exts: ["md", "lhs"].into(),
-            func: as_index::<crate::html::Slideshow>,
+            func: process_content::<crate::html::Slideshow>,
         },
         Source {
             path: "content/wiki/**/*",
             exts: ["md"].into(),
-            func: as_index::<crate::html::Wiki>,
+            func: process_content::<crate::html::Wiki>,
         },
     ];
 
-    let assets: Vec<Output> = sources
-        .iter()
-        .flat_map(Source::get)
-        .map(to_bundle)
-        .filter_map(|item| match item {
-            PipelineItem::Skip(skip) => {
-                println!("Skipping {}", skip.path);
-                None
-            }
-            PipelineItem::Take(take) => Some(take),
-        })
-        .collect();
-
-    let assets: Vec<Output> = vec![
-        assets,
-        vec![
+    let special = &[
         Output {
             kind: Virtual::new(|sack| crate::html::map(sack).render().to_owned().into()).into(),
             path: "map/index.html".into(),
             link: None,
         },
         Output {
-            kind: Virtual::new(|sack| crate::html::search(sack).render().to_owned().into())
-                .into(),
+            kind: Virtual::new(|sack| crate::html::search(sack).render().to_owned().into()).into(),
             path: "search/index.html".into(),
             link: None,
         },
@@ -196,66 +145,94 @@ fn build(ctx: &BuildContext) {
         },
         Output {
             kind: Virtual::new(|sack| {
-                crate::html::to_list(sack, sack.get_links("slides/**/*.html"), "Slideshows".into())
+                crate::html::to_list(
+                    sack,
+                    sack.get_links("slides/**/*.html"),
+                    "Slideshows".into(),
+                )
             })
             .into(),
             path: "slides/index.html".into(),
             link: None,
         },
-        ],
-    ]
-    .into_iter()
-    .flatten()
-    .collect();
-
-    {
-        let now = std::time::Instant::now();
-        pipeline::render_all(ctx, &assets);
-        println!("Elapsed: {:.2?}", now.elapsed());
-    }
-
-    utils::copy_recursively(std::path::Path::new("public"), std::path::Path::new("dist")).unwrap();
-
-    build_styles();
-
-    let res = Command::new("pagefind")
-        .args(["--site", "dist"])
-        .output()
-        .unwrap();
-    println!("{}", String::from_utf8(res.stdout).unwrap());
-
-    let res = Command::new("esbuild")
-        .arg("js/vanilla/reveal.js")
-        .arg("js/vanilla/photos.ts")
-        .arg("js/search/dist/search.js")
-        .arg("--format=esm")
-        .arg("--bundle")
-        .arg("--splitting")
-        .arg("--minify")
-        .arg("--outdir=dist/js/")
-        .output()
-        .unwrap();
-    println!("{}", String::from_utf8(res.stderr).unwrap());
-}
+    ];
+
+    match args.mode {
+        Mode::Build => {
+            build(&ctx, sources, special);
+        }
+        Mode::Watch => {
+            build(&ctx, sources, special);
+            watch::watch(&ctx, sources).unwrap()
+        }
+    }
+}
+
+#[derive(Debug)]
+struct Source {
+    pub path: &'static str,
+    pub exts: HashSet<&'static str>,
+    pub func: fn(PipelineItem) -> PipelineItem,
+}
+
+impl Source {
+    fn get(&self) -> Vec<PipelineItem> {
+        pipeline::gather(self.path, &self.exts)
+            .into_iter()
+            .map(self.func)
+            .collect()
+    }
+
+    fn get_maybe(&self, path: &Utf8Path) -> Option<PipelineItem> {
+        let pattern = glob::Pattern::new(self.path).expect("Bad pattern");
+        if !pattern.matches_path(path.as_std_path()) {
+            return None;
+        };
+
+        let item = match path.is_file() {
+            true => Some(crate::pipeline::to_source(path.to_owned(), &self.exts)),
+            false => None,
+        };
+
+        item.map(Into::into).map(self.func)
+    }
+}
+
+fn build(ctx: &BuildContext, sources: &[Source], special: &[Output]) {
+    crate::build::clean_dist();
+
+    let sources: Vec<_> = sources
+        .iter()
+        .flat_map(Source::get)
+        .map(to_bundle)
+        .filter_map(Option::from)
+        .collect();
+
+    let assets: Vec<_> = sources.iter().chain(special).collect();
+
+    crate::build::build_content(ctx, &assets);
+    crate::build::build_static();
+    crate::build::build_styles();
+    crate::build::build_pagefind();
+    crate::build::build_js();
+}
 
-pub fn parse_frontmatter<T>(raw: &str) -> (T, String)
+pub fn parse_frontmatter<D>(raw: &str) -> (D, String)
 where
-    T: for<'de> Deserialize<'de>,
+    D: for<'de> Deserialize<'de>,
 {
-    let matter = Matter::<YAML>::new();
-    let result = matter.parse(raw);
+    let parser = Matter::<YAML>::new();
+    let result = parser.parse_with_struct::<D>(raw).unwrap();
 
     (
         // Just the front matter
-        result.data.unwrap().deserialize::<T>().unwrap(),
+        result.data,
         // The rest of the content
         result.content,
     )
 }
 
-fn as_index<T>(item: PipelineItem) -> PipelineItem
+fn process_content<T>(item: PipelineItem) -> PipelineItem
 where
     T: for<'de> Deserialize<'de> + Content + Clone + 'static,
 {
@@ -273,16 +250,16 @@ where
     match meta.path.extension() {
         Some("md" | "mdx" | "lhs") => {
-            let data = fs::read_to_string(&meta.path).unwrap();
-            let (fm, md) = parse_frontmatter::<T>(&data);
-            let link = T::as_link(&fm, Utf8Path::new("/").join(dir));
+            let raw = fs::read_to_string(&meta.path).unwrap();
+            let (matter, parsed) = parse_frontmatter::<T>(&raw);
+            let link = T::as_link(&matter, Utf8Path::new("/").join(dir));
 
             Output {
                 kind: Asset {
                     kind: pipeline::AssetKind::html(move |sack| {
                         let lib = sack.get_library();
-                        let (outline, parsed, bib) = T::parse(md.clone(), lib);
-                        T::render(fm.clone(), sack, Raw(parsed), outline, bib)
+                        let (outline, parsed, bib) = T::parse(parsed.clone(), lib);
+                        T::render(matter.clone(), sack, Raw(parsed), outline, bib)
                             .render()
                             .into()
                     }),

View file

@@ -2,8 +2,8 @@
 //! loading the data from hard drive, and then processing it further depending on the file type.
 
 use std::collections::{HashMap, HashSet};
-use std::fs::{self, File};
-use std::io::Write;
+use std::fmt::Debug;
+use std::usize;
 
 use camino::{Utf8Path, Utf8PathBuf};
 use glob::glob;
@@ -68,6 +68,20 @@ pub(crate) enum AssetKind {
     Image,
 }
 
+impl Debug for AssetKind {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            Self::Html(fun) => {
+                // rust mental gymnastics moment
+                let ptr = &**fun as *const dyn Fn(&Sack) -> String as *const () as usize;
+                f.debug_tuple("Html").field(&ptr).finish()
+            },
+            Self::Bibtex(b) => f.debug_tuple("Bibtex").field(b).finish(),
+            Self::Image => write!(f, "Image"),
+        }
+    }
+}
+
 impl AssetKind {
     pub fn html(f: impl Fn(&Sack) -> String + 'static) -> Self {
         Self::Html(Box::new(f))
@ -75,6 +89,7 @@ impl AssetKind {
} }
/// Asset corresponding to a file on disk. /// Asset corresponding to a file on disk.
#[derive(Debug)]
pub(crate) struct Asset { pub(crate) struct Asset {
/// The kind of a processed asset. /// The kind of a processed asset.
pub kind: AssetKind, pub kind: AssetKind,
@@ -84,7 +99,7 @@ pub(crate) struct Asset {
 
 /// Dynamically generated asset not corresponding to any file on disk. This is useful when the
 /// generated page is not a content page, e.g. page list.
-pub(crate) struct Virtual(Box<dyn Fn(&Sack) -> String>);
+pub(crate) struct Virtual(pub Box<dyn Fn(&Sack) -> String>);
 
 impl Virtual {
     pub fn new(call: impl Fn(&Sack) -> String + 'static) -> Self {
@@ -92,7 +107,16 @@ impl Virtual {
     }
 }
 
+impl Debug for Virtual {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        // rust mental gymnastics moment
+        let ptr = &*self.0 as *const dyn Fn(&Sack) -> String as *const () as usize;
+        f.debug_tuple("Virtual").field(&ptr).finish()
+    }
+}
+
 /// The kind of an output item.
+#[derive(Debug)]
 pub(crate) enum OutputKind {
     /// Marks an output item which corresponds to a file on disk.
     Asset(Asset),
@@ -113,6 +137,7 @@ impl From<Virtual> for OutputKind {
 }
 
 /// Renderable output
+#[derive(Debug)]
 pub(crate) struct Output {
     /// The kind of an output item
     pub(crate) kind: OutputKind,
@@ -124,6 +149,7 @@ pub(crate) struct Output {
 
 /// Items currently in the pipeline. In order for an item to be rendered, it needs to be marked as
 /// `Take`, which means it needs to have an output location assigned to itself.
+#[derive(Debug)]
 pub(crate) enum PipelineItem {
     /// Unclaimed file.
     Skip(FileItem),
@@ -143,16 +169,25 @@ impl From<Output> for PipelineItem {
     }
 }
 
+impl From<PipelineItem> for Option<Output> {
+    fn from(value: PipelineItem) -> Self {
+        match value {
+            PipelineItem::Skip(_) => None,
+            PipelineItem::Take(e) => Some(e),
+        }
+    }
+}
+
 /// This struct allows for querying the website hierarchy. It is passed to each rendered website
 /// page, so that it can easily access the website metadata.
 pub(crate) struct Sack<'a> {
     pub ctx: &'a BuildContext,
     /// Literally all of the content
-    hole: &'a [Output],
+    pub hole: &'a [&'a Output],
     /// Current path for the page being rendered
-    path: &'a Utf8PathBuf,
+    pub path: &'a Utf8PathBuf,
     /// Original file location for this page
-    file: Option<&'a Utf8PathBuf>,
+    pub file: Option<&'a Utf8PathBuf>,
 }
 
 impl<'a> Sack<'a> {
@@ -254,7 +289,7 @@ pub fn gather(pattern: &str, exts: &HashSet<&'static str>) -> Vec<PipelineItem>
         .collect()
 }
 
-fn to_source(path: Utf8PathBuf, exts: &HashSet<&'static str>) -> FileItem {
+pub(crate) fn to_source(path: Utf8PathBuf, exts: &HashSet<&'static str>) -> FileItem {
     let hit = path.extension().map_or(false, |ext| exts.contains(ext));
 
     let kind = match hit {
@@ -264,51 +299,3 @@ fn to_source(path: Utf8PathBuf, exts: &HashSet<&'static str>) -> FileItem {
 
     FileItem { kind, path }
 }
-
-pub fn render_all(ctx: &BuildContext, items: &[Output]) {
-    for item in items {
-        let file = match &item.kind {
-            OutputKind::Asset(a) => Some(&a.meta.path),
-            OutputKind::Virtual(_) => None,
-        };
-
-        render(
-            item,
-            Sack {
-                ctx,
-                hole: items,
-                path: &item.path,
-                file,
-            },
-        );
-    }
-}
-
-fn render(item: &Output, sack: Sack) {
-    let o = Utf8Path::new("dist").join(&item.path);
-    fs::create_dir_all(o.parent().unwrap()).unwrap();
-
-    match item.kind {
-        OutputKind::Asset(ref real) => {
-            let i = &real.meta.path;
-
-            match &real.kind {
-                AssetKind::Html(closure) => {
-                    let mut file = File::create(&o).unwrap();
-                    file.write_all(closure(&sack).as_bytes()).unwrap();
-                    println!("HTML: {} -> {}", i, o);
-                }
-                AssetKind::Bibtex(_) => {}
-                AssetKind::Image => {
-                    fs::create_dir_all(o.parent().unwrap()).unwrap();
-                    fs::copy(i, &o).unwrap();
-                    println!("Image: {} -> {}", i, o);
-                }
-            };
-        }
-        OutputKind::Virtual(Virtual(ref closure)) => {
-            let mut file = File::create(&o).unwrap();
-            file.write_all(closure(&sack).as_bytes()).unwrap();
-            println!("Virtual: -> {}", o);
-        }
-    }
-}

View file

@ -1,18 +0,0 @@
use std::fs;
use std::io;
use std::path::Path;
pub fn copy_recursively(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> io::Result<()> {
fs::create_dir_all(&dst)?;
for entry in fs::read_dir(src)? {
let entry = entry?;
let filetype = entry.file_type()?;
if filetype.is_dir() {
copy_recursively(entry.path(), dst.as_ref().join(entry.file_name()))?;
} else {
fs::copy(entry.path(), dst.as_ref().join(entry.file_name()))?;
}
}
Ok(())
}

View file

@@ -1,3 +1,4 @@
+use std::env;
 use std::io::Result;
 use std::net::{TcpListener, TcpStream};
 use std::path::Path;
@@ -6,12 +7,14 @@ use std::sync::{Arc, Mutex};
 use std::thread::JoinHandle;
 use std::time::Duration;
 
+use camino::Utf8PathBuf;
 use notify::RecursiveMode;
 use notify_debouncer_mini::new_debouncer;
 use tungstenite::WebSocket;
 
-use crate::build::build_styles;
+use crate::build::{build_content, build_styles};
+use crate::pipeline::Output;
+use crate::{BuildContext, Source};
 
 fn new_thread_ws_incoming(
     server: TcpListener,
@@ -66,23 +69,42 @@ fn new_thread_ws_reload(
     (tx, thread)
 }
 
-pub fn watch() -> Result<()> {
+pub fn watch(ctx: &BuildContext, sources: &[Source]) -> Result<()> {
+    let root = env::current_dir().unwrap();
     let server = TcpListener::bind("127.0.0.1:1337")?;
     let client = Arc::new(Mutex::new(vec![]));
 
     let (tx, rx) = std::sync::mpsc::channel();
-    let mut debouncer = new_debouncer(Duration::from_secs(2), tx).unwrap();
+    let mut debouncer = new_debouncer(Duration::from_millis(250), tx).unwrap();
 
     debouncer
         .watcher()
-        .watch(Path::new("./styles"), RecursiveMode::Recursive)
+        .watch(Path::new("styles"), RecursiveMode::Recursive)
+        .unwrap();
+
+    debouncer
+        .watcher()
+        .watch(Path::new("content"), RecursiveMode::Recursive)
         .unwrap();
 
     let thread_i = new_thread_ws_incoming(server, client.clone());
     let (tx_reload, thread_o) = new_thread_ws_reload(client.clone());
 
-    while let Ok(_ev) = rx.recv().unwrap() {
+    while let Ok(events) = rx.recv().unwrap() {
+        let items = events
+            .into_iter()
+            .filter_map(|event| {
+                Utf8PathBuf::from_path_buf(event.path)
+                    .ok()
+                    .and_then(|path| path.strip_prefix(&root).ok().map(ToOwned::to_owned))
+                    .and_then(|path| sources.iter().find_map(|s| s.get_maybe(&path)))
+            })
+            .filter_map(Option::from)
+            .collect::<Vec<Output>>();
+
+        let items = items.iter().collect::<Vec<_>>();
+        build_content(ctx, &items);
         build_styles();
         tx_reload.send(()).unwrap();
     }